Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_transforms_presets_mask_rcnn():
    """Smoke-test the Mask R-CNN default train/val transforms through DataLoaders."""
    # Only the validation split is loaded for both roles; the training split
    # is very slow to load.
    train_dataset = gcv.data.COCOInstance(splits=('instances_val2017',), skip_empty=True)
    val_dataset = gcv.data.COCOInstance(splits=('instances_val2017',))
    net = gcv.model_zoo.get_model('mask_rcnn_resnet50_v1b_coco',
                                  pretrained=False, pretrained_base=False)
    net.initialize()
    num_workers = 0
    short, max_size = 800, 1333
    batch_size = 8
    # Append-based batchify: samples in a batch may have different shapes.
    train_batchify = batchify.Tuple(*[batchify.Append() for _ in range(6)])
    train_loader = mx.gluon.data.DataLoader(
        train_dataset.transform(rcnn.MaskRCNNDefaultTrainTransform(short, max_size, net)),
        batch_size, True, batchify_fn=train_batchify, last_batch='rollover',
        num_workers=num_workers)
    val_batchify = batchify.Tuple(*[batchify.Append() for _ in range(2)])
    val_loader = mx.gluon.data.DataLoader(
        val_dataset.transform(rcnn.MaskRCNNDefaultValTransform(short, max_size)),
        batch_size, False, batchify_fn=val_batchify, last_batch='keep',
        num_workers=num_workers)
    # Pull a couple of batches from each loader just to exercise the transforms.
    for data_loader in (train_loader, val_loader):
        for batch_idx, _batch in enumerate(data_loader):
            if batch_idx > 1:
                break
def get_dataloader(net, train_dataset, val_dataset, data_shape, batch_size, num_workers, args):
    """Build YOLO3 training and validation dataloaders.

    When not in the final-fit stage and no ``val_dataset`` is provided, a random
    ``(1 - args.split_ratio)`` fraction of ``train_dataset`` is held out as the
    validation set.

    Returns
    -------
    (train_loader, val_loader) : ``val_loader`` is None when there is no
        validation dataset.
    """
    if (not args.final_fit) and (not val_dataset):
        train_dataset, val_dataset = _train_val_split(train_dataset, args.split_ratio)
    width, height = data_shape, data_shape
    # Stack the image and the 6 generated targets; pad variable-length gt boxes with -1.
    batchify_fn = Tuple(*([Stack() for _ in range(6)] + [Pad(axis=0, pad_val=-1) for _ in range(1)]))
    if args.no_random_shape:
        train_loader = gluon.data.DataLoader(
            train_dataset.transform(YOLO3DefaultTrainTransform(width, height, net, mixup=args.mixup)),
            batch_size, True, batchify_fn=batchify_fn, last_batch='rollover',
            num_workers=num_workers)
    else:
        # Multi-scale training: switch the input size among {320, ..., 608}
        # (multiples of 32) every 10 batches.
        transform_fns = [YOLO3DefaultTrainTransform(x * 32, x * 32, net, mixup=args.mixup)
                         for x in range(10, 20)]
        train_loader = RandomTransformDataLoader(
            transform_fns, train_dataset, batch_size=batch_size, interval=10,
            last_batch='rollover', shuffle=True, batchify_fn=batchify_fn,
            num_workers=num_workers)
    val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
    val_loader = None
    if val_dataset:
        val_loader = gluon.data.DataLoader(
            val_dataset.transform(YOLO3DefaultValTransform(width, height)),
            batch_size, False, batchify_fn=val_batchify_fn, last_batch='keep',
            num_workers=num_workers)
    # BUG FIX: the original fell off the end and implicitly returned None,
    # discarding both loaders (cf. the sibling get_dataloader which returns them).
    return train_loader, val_loader
def get_dataloader(net, train_dataset, val_dataset, data_shape, batch_size, num_workers, args):
    """Create the YOLO3 training and validation dataloaders."""
    width = height = data_shape
    # Stack the image and the 6 generated targets; pad variable-length gt boxes with -1.
    stacked_targets = [Stack() for _ in range(6)]
    padded_boxes = [Pad(axis=0, pad_val=-1)]
    batchify_fn = Tuple(*(stacked_targets + padded_boxes))
    if args.no_random_shape:
        fixed_transform = YOLO3DefaultTrainTransform(width, height, net, mixup=args.mixup)
        train_loader = gluon.data.DataLoader(
            train_dataset.transform(fixed_transform), batch_size, True,
            batchify_fn=batchify_fn, last_batch='rollover', num_workers=num_workers)
    else:
        print('use random shape')
        # Multi-scale training: one transform per input size in {320, ..., 608}.
        sizes = [scale * 32 for scale in range(10, 20)]
        transform_fns = [YOLO3DefaultTrainTransform(size, size, net, mixup=args.mixup)
                         for size in sizes]
        train_loader = RandomTransformDataLoader(
            transform_fns, train_dataset, batch_size=batch_size, interval=10,
            last_batch='rollover', shuffle=True, batchify_fn=batchify_fn,
            num_workers=num_workers)
    val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
    val_loader = gluon.data.DataLoader(
        val_dataset.transform(YOLO3DefaultValTransform(width, height)),
        batch_size, False, batchify_fn=val_batchify_fn, last_batch='keep',
        num_workers=num_workers)
    return train_loader, val_loader
def get_dataloader(train_dataset, val_dataset, data_shape, batch_size, num_workers, ctx):
    """Build training and validation dataloaders.

    The training dataset is assumed to already yield the stacked targets
    (image, heatmaps, scale, offset, inds, masks); validation samples go
    through ``SSDDefaultValTransform``.

    Returns
    -------
    (train_loader, val_loader)
    """
    width, height = data_shape, data_shape
    # stack image, heatmaps, scale, offset, inds, masks
    batchify_fn = Tuple(Stack(), Stack(), Stack(), Stack(), Stack(), Stack())
    val_batchify_fn = Tuple(Stack(), Pad(pad_val=-1))
    # Disabled image-augmentation pipeline kept for reference (typo
    # `transfroms` fixed):
    # img_transform = transforms.Compose([transforms.Resize(640),
    #                                     transforms.RandomResizedCrop(512, scale=(0.6, 1.3), ratio=(0.75, 1.33)),
    #                                     transforms.RandomFlipLeftRight(),
    #                                     transforms.RandomColorJitter(),
    #                                     transforms.ToTensor(),
    #                                     transforms.Normalize(0, 1)])
    # train_dataset = train_dataset.transform_first(img_transform)
    train_loader = gluon.data.DataLoader(
        train_dataset, batch_size, True, batchify_fn=batchify_fn,
        last_batch='rollover', num_workers=num_workers)
    # BUG FIX: the original val_loader call was truncated mid-argument-list
    # (syntax error) and the function never returned its loaders; completed
    # following the pattern used by every sibling get_dataloader.
    val_loader = gluon.data.DataLoader(
        val_dataset.transform(SSDDefaultValTransform(width, height)),
        batch_size, False, batchify_fn=val_batchify_fn, last_batch='keep',
        num_workers=num_workers)
    return train_loader, val_loader
def get_dataloader(net, train_dataset, val_dataset, short, max_size, batch_size, num_workers):
    """Construct Faster R-CNN training and validation dataloaders."""
    # Append-based batchify: images in a batch may have different shapes.
    train_batchify = batchify.Tuple(*[batchify.Append() for _ in range(3)])
    train_transform = FasterRCNNDefaultTrainTransform(short, max_size, net)
    train_loader = mx.gluon.data.DataLoader(
        train_dataset.transform(train_transform), batch_size, True,
        batchify_fn=train_batchify, last_batch='rollover', num_workers=num_workers)
    val_batchify = batchify.Tuple(*[batchify.Append() for _ in range(3)])
    val_transform = FasterRCNNDefaultValTransform(short, max_size)
    val_loader = mx.gluon.data.DataLoader(
        val_dataset.transform(val_transform), batch_size, False,
        batchify_fn=val_batchify, last_batch='keep', num_workers=num_workers)
    return train_loader, val_loader
def get_dataloader(net, val_dataset, batch_size, num_workers):
    """Build the validation dataloader for Mask R-CNN evaluation."""
    # Short side / max size come from the network itself; Append-based
    # batchify keeps per-image shapes intact.
    transformed = val_dataset.transform(
        MaskRCNNDefaultValTransform(net.short, net.max_size))
    batchifier = batchify.Tuple(*[batchify.Append() for _ in range(2)])
    return mx.gluon.data.DataLoader(
        transformed, batch_size, False, batchify_fn=batchifier,
        last_batch='keep', num_workers=num_workers)