How to use the albumentations.RGBShift function in albumentations

To help you get started, we’ve selected a few albumentations examples based on popular ways albumentations.RGBShift is used in public projects.

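Before looking at the project snippets, here is a minimal, self-contained sketch of RGBShift on its own (not taken from the projects below; the shift limits and placeholder image are illustrative):

import albumentations as A
import numpy as np

# RGBShift randomly shifts the R, G and B channels within the given limits.
transform = A.Compose([A.RGBShift(r_shift_limit=20, g_shift_limit=20, b_shift_limit=20, p=1.0)])

image = np.random.randint(0, 256, (256, 256, 3), dtype=np.uint8)  # placeholder image
augmented = transform(image=image)["image"]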

From albumentations-team/albumentations, tests/test_transforms.py (view on GitHub):
    aug = A.Compose(
        [
            A.OneOrOther(
                A.Compose(
                    [
                        A.RandomSizedCrop(min_max_height=(256, 1025), height=512, width=512, p=1),
                        A.OneOf(
                            [
                                A.RandomSizedCrop(min_max_height=(256, 512), height=384, width=384, p=0.5),
                                A.RandomSizedCrop(min_max_height=(256, 512), height=512, width=512, p=0.5),
                            ]
                        ),
                    ]
                ),
                A.Compose(
                    [
                        A.RandomSizedCrop(min_max_height=(256, 1025), height=256, width=256, p=1),
                        A.OneOf([A.HueSaturationValue(p=0.5), A.RGBShift(p=0.7)], p=1),
                    ]
                ),
            ),
            A.HorizontalFlip(p=1),
            A.RandomBrightnessContrast(p=0.5),
        ]
    )

    res = aug(image=np.zeros((1248, 1248, 3), dtype=np.uint8))
    assert res["image"].shape[0] in (256, 384, 512)
    assert res["image"].shape[1] in (256, 384, 512)
From albumentations-team/albumentations, tests/test_serialization.py (view on GitHub):
def test_transform_pipeline_serialization_with_bboxes(seed, image, bboxes, bbox_format, labels):
    aug = A.Compose(
        [
            A.OneOrOther(
                A.Compose([A.RandomRotate90(), A.OneOf([A.HorizontalFlip(p=0.5), A.VerticalFlip(p=0.5)])]),
                A.Compose([A.Rotate(p=0.5), A.OneOf([A.HueSaturationValue(p=0.5), A.RGBShift(p=0.7)], p=1)]),
            ),
            A.HorizontalFlip(p=1),
            A.RandomBrightnessContrast(p=0.5),
        ],
        bbox_params={"format": bbox_format, "label_fields": ["labels"]},
    )
    serialized_aug = A.to_dict(aug)
    deserialized_aug = A.from_dict(serialized_aug)
    set_seed(seed)
    aug_data = aug(image=image, bboxes=bboxes, labels=labels)
    set_seed(seed)
    deserialized_aug_data = deserialized_aug(image=image, bboxes=bboxes, labels=labels)
    assert np.array_equal(aug_data["image"], deserialized_aug_data["image"])
    assert np.array_equal(aug_data["bboxes"], deserialized_aug_data["bboxes"])
From albumentations-team/albumentations, tests/test_serialization.py (view on GitHub):
    aug = A.Compose(
        [
            A.OneOrOther(
                A.Compose(
                    [
                        A.Resize(1024, 1024),
                        A.RandomSizedCrop(min_max_height=(256, 1024), height=512, width=512, p=1),
                        A.OneOf(
                            [
                                A.RandomSizedCrop(min_max_height=(256, 512), height=384, width=384, p=0.5),
                                A.RandomSizedCrop(min_max_height=(256, 512), height=512, width=512, p=0.5),
                            ]
                        ),
                    ]
                ),
                A.Compose(
                    [
                        A.Resize(1024, 1024),
                        A.RandomSizedCrop(min_max_height=(256, 1025), height=256, width=256, p=1),
                        A.OneOf([A.HueSaturationValue(p=0.5), A.RGBShift(p=0.7)], p=1),
                    ]
                ),
            ),
            A.HorizontalFlip(p=1),
            A.RandomBrightnessContrast(p=0.5),
        ]
    )
    serialized_aug = A.to_dict(aug)
    deserialized_aug = A.from_dict(serialized_aug)
    set_seed(seed)
    aug_data = aug(image=image, mask=mask)
    set_seed(seed)
    deserialized_aug_data = deserialized_aug(image=image, mask=mask)
    assert np.array_equal(aug_data["image"], deserialized_aug_data["image"])
    assert np.array_equal(aug_data["mask"], deserialized_aug_data["mask"])
From albumentations-team/albumentations, tests/test_serialization.py (view on GitHub):
def test_transform_pipeline_serialization_with_keypoints(seed, image, keypoints, keypoint_format, labels):
    aug = A.Compose(
        [
            A.OneOrOther(
                A.Compose([A.RandomRotate90(), A.OneOf([A.HorizontalFlip(p=0.5), A.VerticalFlip(p=0.5)])]),
                A.Compose([A.Rotate(p=0.5), A.OneOf([A.HueSaturationValue(p=0.5), A.RGBShift(p=0.7)], p=1)]),
            ),
            A.HorizontalFlip(p=1),
            A.RandomBrightnessContrast(p=0.5),
        ],
        keypoint_params={"format": keypoint_format, "label_fields": ["labels"]},
    )
    serialized_aug = A.to_dict(aug)
    deserialized_aug = A.from_dict(serialized_aug)
    set_seed(seed)
    aug_data = aug(image=image, keypoints=keypoints, labels=labels)
    set_seed(seed)
    deserialized_aug_data = deserialized_aug(image=image, keypoints=keypoints, labels=labels)
    assert np.array_equal(aug_data["image"], deserialized_aug_data["image"])
    assert np.array_equal(aug_data["keypoints"], deserialized_aug_data["keypoints"])
From ternaus/iglovikov_segmentation, configs/fpn_se_resnext101_32x4d_cityscapes_2V100c.py (view on GitHub):
),
            height=train_parameters["height_crop_size"],
            width=train_parameters["width_crop_size"],
            w2h_ratio=1.0,
            p=1,
        ),
        albu.ShiftScaleRotate(
            border_mode=cv2.BORDER_CONSTANT, rotate_limit=10, scale_limit=0, p=0.5, mask_value=ignore_index
        ),
        albu.RandomBrightnessContrast(p=0.5),
        albu.RandomGamma(p=0.5),
        albu.ImageCompression(quality_lower=20, quality_upper=100, p=0.5),
        albu.GaussNoise(p=0.5),
        albu.Blur(p=0.5),
        albu.CoarseDropout(p=0.5, max_height=26, max_width=16),
        albu.OneOf([albu.HueSaturationValue(p=0.5), albu.RGBShift(p=0.5)], p=0.5),
        normalization,
    ],
    p=1,
)

val_augmentations = albu.Compose(
    [
        albu.PadIfNeeded(
            min_height=1024, min_width=2048, border_mode=cv2.BORDER_CONSTANT, mask_value=ignore_index, p=1
        ),
        normalization,
    ],
    p=1,
)

test_augmentations = albu.Compose([normalization], p=1)
From ternaus/iglovikov_segmentation, configs/fpn_se_resnext101_32x4d_cityscapes_2V100b.py (view on GitHub):
),
            height=train_parameters["height_crop_size"],
            width=train_parameters["width_crop_size"],
            w2h_ratio=1.0,
            p=1,
        ),
        albu.ShiftScaleRotate(
            border_mode=cv2.BORDER_CONSTANT, rotate_limit=10, scale_limit=0, p=0.5, mask_value=ignore_index
        ),
        albu.RandomBrightnessContrast(p=0.5),
        albu.RandomGamma(p=0.5),
        albu.ImageCompression(quality_lower=20, quality_upper=100, p=0.5),
        albu.GaussNoise(p=0.5),
        albu.Blur(p=0.5),
        albu.CoarseDropout(p=0.5, max_height=26, max_width=16),
        albu.OneOf([albu.HueSaturationValue(p=0.5), albu.RGBShift(p=0.5)], p=0.5),
        normalization,
    ],
    p=1,
)

val_augmentations = albu.Compose(
    [
        albu.PadIfNeeded(
            min_height=1024, min_width=2048, border_mode=cv2.BORDER_CONSTANT, mask_value=ignore_index, p=1
        ),
        normalization,
    ],
    p=1,
)

test_augmentations = albu.Compose([normalization], p=1)
From ternaus/iglovikov_segmentation, configs/fpn_se_resnext101_32x4d_cityscapes_2gpu.py (view on GitHub):
),
            height=train_parameters["height_crop_size"],
            width=train_parameters["width_crop_size"],
            w2h_ratio=1.0,
            p=1,
        ),
        albu.ShiftScaleRotate(
            border_mode=cv2.BORDER_CONSTANT, rotate_limit=10, scale_limit=0, p=0.5, mask_value=ignore_index
        ),
        albu.RandomBrightnessContrast(p=0.5),
        albu.RandomGamma(p=0.5),
        albu.ImageCompression(quality_lower=20, quality_upper=100, p=0.5),
        albu.GaussNoise(p=0.5),
        albu.Blur(p=0.5),
        albu.CoarseDropout(p=0.5, max_height=26, max_width=16),
        albu.OneOf([albu.HueSaturationValue(p=0.5), albu.RGBShift(p=0.5)], p=0.5),
        normalization,
    ],
    p=1,
)

val_augmentations = albu.Compose(
    [
        albu.PadIfNeeded(
            min_height=1024, min_width=2048, border_mode=cv2.BORDER_CONSTANT, mask_value=ignore_index, p=1
        ),
        normalization,
    ],
    p=1,
)

test_augmentations = albu.Compose([normalization], p=1)
From catalyst-team/segmentation, src/transforms.py (view on GitHub):
def hard_transform(image_size: int = 256, p: float = 0.5):
    """Hard augmentations"""
    transforms = Compose([
        ShiftScaleRotate(
            shift_limit=0.1,
            scale_limit=0.1,
            rotate_limit=15,
            border_mode=cv2.BORDER_REFLECT,
            p=p,
        ),
        IAAPerspective(scale=(0.02, 0.05), p=p),
        OneOf([
            HueSaturationValue(p=p),
            ToGray(p=p),
            RGBShift(p=p),
            ChannelShuffle(p=p),
        ]),
        RandomBrightnessContrast(
            brightness_limit=0.5, contrast_limit=0.5, p=p
        ),
        RandomGamma(p=p),
        CLAHE(p=p),
        JpegCompression(quality_lower=50, p=p),
    ])
    return transforms
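As a usage sketch (the image variable below is illustrative, not part of the project), the returned pipeline is called with keyword arguments:

transforms = hard_transform(image_size=256, p=0.5)
augmented = transforms(image=image)  # image: a uint8 NumPy array of shape (H, W, 3)
augmented_image = augmented["image"]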
From BloodAxe/Catalyst-Inria-Segmentation-Example, inria/augmentations.py (view on GitHub):
A.RandomGridShuffle(),
            A.ShiftScaleRotate(
                scale_limit=0.1, rotate_limit=45, border_mode=cv2.BORDER_CONSTANT, mask_value=0, value=0
            ),
            A.ElasticTransform(border_mode=cv2.BORDER_CONSTANT, alpha_affine=5, mask_value=0, value=0),
            # Add occasional blur or noise
            A.OneOf([A.GaussianBlur(), A.GaussNoise(), A.IAAAdditiveGaussianNoise(), A.NoOp()]),
            # Dropout-style occlusions
            A.OneOf([A.CoarseDropout(), A.MaskDropout(max_objects=10), A.NoOp()]),
            # Spatial-preserving augmentations:
            A.OneOf(
                [
                    A.RandomBrightnessContrast(brightness_by_max=True),
                    A.CLAHE(),
                    A.HueSaturationValue(),
                    A.RGBShift(),
                    A.RandomGamma(),
                    A.NoOp(),
                ]
            ),
            # Weather effects
            A.OneOf([A.RandomFog(fog_coef_lower=0.01, fog_coef_upper=0.3, p=0.1), A.NoOp()]),
            A.Normalize(),
        ]