# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# Seed every RNG used downstream so runs are reproducible.
# The original seeded only NumPy and the CUDA generator; torch.manual_seed
# is added so CPU-side torch ops (weight init, torch.randperm, dataloader
# shuffling) are deterministic as well.
np.random.seed(1)
torch.manual_seed(1)
torch.cuda.manual_seed(1)  # silently ignored when CUDA is unavailable
# /////////////// Dataset Loading ///////////////
# NOTE(review): fragment with flattened indentation — the bodies of the
# if/else below have lost their leading whitespace and must be re-indented
# before this can run.  `args`, `trn` (torchvision.transforms) and `dset`
# (torchvision.datasets) are defined elsewhere.
if args.c100:
# mean and standard deviation of channels of CIFAR-10 images
# (values below are the usual CIFAR-10 per-channel stats divided by 255,
# even though the datasets loaded here are CIFAR-100 — TODO confirm intended)
mean = [x / 255 for x in [125.3, 123.0, 113.9]]
std = [x / 255 for x in [63.0, 62.1, 66.7]]
# Train-time augmentation: flip + padded random crop, then normalize.
train_transform = trn.Compose([trn.RandomHorizontalFlip(), trn.RandomCrop(32, padding=4),
trn.ToTensor(), trn.Normalize(mean, std)])
test_transform = trn.Compose([trn.ToTensor(), trn.Normalize(mean, std)])
# Hard-coded cluster path; download=False assumes the data is already there.
train_data = dset.CIFAR100('/share/data/vision-greg/cifarpy', train=True, transform=train_transform, download=False)
test_data = dset.CIFAR100('/share/data/vision-greg/cifarpy', train=False, transform=test_transform, download=False)
num_classes = 100
else:
# Icons-50 loaded as an ImageFolder; NOTE(review): train and test point at
# the SAME directory, so the test set is not held out — verify upstream.
train_data = dset.ImageFolder('/share/data/vision-greg/DistortedImageNet/Icons-50',
transform=trn.Compose([trn.Resize((32, 32)), trn.RandomHorizontalFlip(),
trn.RandomCrop(32, padding=4), trn.ToTensor(),
# RandomErasing()
]))
test_data = dset.ImageFolder('/share/data/vision-greg/DistortedImageNet/Icons-50',
transform=trn.Compose([trn.Resize((32, 32)), trn.ToTensor()]))
num_classes = 50
# NOTE(review): fragment with flattened indentation, truncated mid-call at the
# DenseNet(...) construction below.  `tv` (torchvision), `mean`, `stdv`,
# `data`, `valid_size`, `batch_size`, `growth_rate` and `block_config` are all
# defined elsewhere — `stdv` here vs `std` in other fragments suggests these
# came from different files.
if args.traditional:
filtered_imgs = []
# NOTE(review): `filtered_imgs` is never appended to, and the transforms below
# are rebuilt identically on every loop iteration — the loop body looks like a
# paste error; the Compose objects should be hoisted out of the loop.
for img in train_data.samples:
train_transforms = tv.transforms.Compose([
tv.transforms.RandomCrop(32, padding=4),
tv.transforms.RandomHorizontalFlip(),
tv.transforms.ToTensor(),
tv.transforms.Normalize(mean=mean, std=stdv),
])
test_transforms = tv.transforms.Compose([
tv.transforms.ToTensor(),
tv.transforms.Normalize(mean=mean, std=stdv),
])
# Split training into train and validation - needed for calibration
#
# IMPORTANT! We need to use the same validation set for temperature
# scaling, so we're going to save the indices for later
# Both sets load the CIFAR-100 *train* split; they differ only in transform
# (augmented for training, plain for validation).
train_set = tv.datasets.CIFAR100(data, train=True, transform=train_transforms, download=True)
valid_set = tv.datasets.CIFAR100(data, train=True, transform=test_transforms, download=False)
# Random permutation so train/valid indices are disjoint subsets of the
# same underlying data.
indices = torch.randperm(len(train_set))
train_indices = indices[:len(indices) - valid_size]
valid_indices = indices[len(indices) - valid_size:] if valid_size else None
# Make dataloaders
# NOTE(review): if valid_size is falsy, valid_indices is None and the
# SubsetRandomSampler(valid_indices) below will fail — confirm valid_size > 0.
train_loader = torch.utils.data.DataLoader(train_set, pin_memory=True, batch_size=batch_size,
sampler=SubsetRandomSampler(train_indices))
valid_loader = torch.utils.data.DataLoader(valid_set, pin_memory=True, batch_size=batch_size,
sampler=SubsetRandomSampler(valid_indices))
# Make model, criterion, and optimizer
# NOTE(review): call truncated here — the closing paren and remaining
# DenseNet kwargs are missing from this fragment.
model = DenseNet(
growth_rate=growth_rate,
block_config=block_config,
num_classes=100
# NOTE(review): tail of a dataset-factory function whose `def` line (and the
# `name`, `split`, `transform` parameters) is outside this fragment.  It maps a
# dataset name to the matching torchvision dataset rooted at the paths in the
# module-level `_dataset_path` dict.
target_transform=None, download=True):
# All torchvision datasets except ImageFolder use a boolean train flag.
train = (split == 'train')
if name == 'mnist':
return datasets.MNIST( root=_dataset_path['mnist'],
train=train,
transform=transform,
target_transform=target_transform,
download=download)
elif name == 'cifar10':
return datasets.CIFAR10(root=_dataset_path['cifar10'],
train=train,
transform=transform,
target_transform=target_transform,
download=download)
elif name == 'cifar100':
return datasets.CIFAR100(root=_dataset_path['cifar100'],
train=train,
transform=transform,
target_transform=target_transform,
download=download)
elif name == 'imagenet':
# ImageFolder has no download support; path is selected per split.
path = _dataset_path[name][split]
return datasets.ImageFolder(root=path,
transform=transform,
target_transform=target_transform)
# NOTE(review): no final else — an unknown `name` falls through and
# implicitly returns None.
# NOTE(review): orphaned fragment — this `std` assignment and `else` belong to
# an if/elif chain over args.dataset whose earlier branches are not in view.
std = [x / 255 for x in [68.2, 65.4, 70.4]]
else:
# "Unknow" typo is in the original runtime string; also note `assert` is
# stripped under `python -O` — a raised ValueError would be more robust.
assert False, "Unknow dataset : {}".format(args.dataset)
# NOTE(review): fragment with flattened if/elif indentation; truncated at the
# train_loader call on the last line.  `args`, `transforms`, `dset`, `mean`
# and `std` come from elsewhere.
train_transform = transforms.Compose(
[transforms.RandomHorizontalFlip(), transforms.RandomCrop(32, padding=4), transforms.ToTensor(),
transforms.Normalize(mean, std)])
test_transform = transforms.Compose(
[transforms.ToTensor(), transforms.Normalize(mean, std)])
# Select dataset + class count; SVHN/STL10 use split= instead of train=.
if args.dataset == 'cifar10':
train_data = dset.CIFAR10(args.data_path, train=True, transform=train_transform, download=True)
test_data = dset.CIFAR10(args.data_path, train=False, transform=test_transform, download=True)
num_classes = 10
elif args.dataset == 'cifar100':
train_data = dset.CIFAR100(args.data_path, train=True, transform=train_transform, download=True)
test_data = dset.CIFAR100(args.data_path, train=False, transform=test_transform, download=True)
num_classes = 100
elif args.dataset == 'svhn':
train_data = dset.SVHN(args.data_path, split='train', transform=train_transform, download=True)
test_data = dset.SVHN(args.data_path, split='test', transform=test_transform, download=True)
num_classes = 10
elif args.dataset == 'stl10':
train_data = dset.STL10(args.data_path, split='train', transform=train_transform, download=True)
test_data = dset.STL10(args.data_path, split='test', transform=test_transform, download=True)
num_classes = 10
elif args.dataset == 'imagenet':
assert False, 'Do not finish imagenet code'
else:
assert False, 'Do not support dataset : {}'.format(args.dataset)
# NOTE(review): DataLoader call truncated mid-arguments here.
train_loader = torch.utils.data.DataLoader(train_data, batch_size=args.batch_size, shuffle=True,
# Build CIFAR-100 train/valid DataLoaders, splitting the official train set
# at `ratio` into a training subset and a held-out subset.
# NOTE(review): flattened indentation — the body must be re-indented under the
# def.  The function is truncated (no return visible); `epoch` is popped from
# kwargs but unused in this view, and model_state_dict/optimizer_state_dict
# are presumably consumed in the missing tail — confirm against the original.
def build_cifar100(model_state_dict=None, optimizer_state_dict=None, **kwargs):
epoch = kwargs.pop('epoch')
ratio = kwargs.pop('ratio')
# NOTE(review): reuses the *cifar10* transform helper for CIFAR-100 —
# possibly intentional (shared augmentation), but verify the normalization
# statistics are the ones wanted here.
train_transform, valid_transform = utils._data_transforms_cifar10(args.cutout_size)
# Both datasets wrap the same train split; only the transform differs.
train_data = dset.CIFAR100(root=args.data, train=True, download=True, transform=train_transform)
valid_data = dset.CIFAR100(root=args.data, train=True, download=True, transform=valid_transform)
num_train = len(train_data)
assert num_train == len(valid_data)
indices = list(range(num_train))
split = int(np.floor(ratio * num_train))
# Shuffle once, then carve disjoint index ranges for the two samplers.
np.random.shuffle(indices)
train_queue = torch.utils.data.DataLoader(
train_data, batch_size=args.batch_size,
sampler=torch.utils.data.sampler.SubsetRandomSampler(indices[:split]),
pin_memory=True, num_workers=16)
valid_queue = torch.utils.data.DataLoader(
valid_data, batch_size=args.eval_batch_size,
sampler=torch.utils.data.sampler.SubsetRandomSampler(indices[split:num_train]),
pin_memory=True, num_workers=16)
# NOTE(review): fragment — the leading `])` closes a Compose from a branch not
# in view; indentation is flattened throughout.
])
else:
# Validation/eval transform: no augmentation, just tensor + normalize
# (`normalize` is defined outside this fragment).
train_transform = transforms.Compose([
transforms.ToTensor(),
normalize
])
# load the dataset
# Both train and valid datasets load the *train* split; the valid set is a
# differently-transformed view selected via index split below.
if name == 'cifar10':
train_dataset = datasets.CIFAR10(root=data_dir, train=True,
download=True, transform=train_transform)
valid_dataset = datasets.CIFAR10(root=data_dir, train=True,
download=True, transform=valid_transform)
else:
train_dataset = datasets.CIFAR100(root=data_dir, train=True,
download=True, transform=train_transform)
valid_dataset = datasets.CIFAR100(root=data_dir, train=True,
download=True, transform=valid_transform)
num_train = len(train_dataset)
indices = list(range(num_train))
# First `split` shuffled indices become validation, the rest training.
split = int(np.floor(valid_size * num_train))
if shuffle == True:
np.random.seed(random_seed)
np.random.shuffle(indices)
train_idx, valid_idx = indices[split:], indices[:split]
# NOTE(review): fragment ends here — the matching valid_sampler/loader
# construction is missing from this view.
train_sampler = SubsetRandomSampler(train_idx)
# NOTE(review): fragment, truncated inside the models.__dict__[...] call at
# the end; indentation of the if/else is flattened.
# Standard CIFAR augmentation with the conventional CIFAR-10 per-channel
# mean/std constants (applied to both datasets here).
transform_train = transforms.Compose([
transforms.RandomCrop(32, padding=4),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
])
transform_test = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
])
# `dataloader` here is actually the dataset *class*, not a DataLoader —
# misleading name kept as-is since callers below use it.
if args.dataset == 'cifar10':
dataloader = datasets.CIFAR10
num_classes = 10
else:
dataloader = datasets.CIFAR100
num_classes = 100
trainset = dataloader(root='./data', train=True, download=True, transform=transform_train)
trainloader = data.DataLoader(trainset, batch_size=args.train_batch, shuffle=True, num_workers=args.workers)
# NOTE(review): download=False for the test split while train uses
# download=True — fails on a fresh machine if the archive check differs.
testset = dataloader(root='./data', train=False, download=False, transform=transform_test)
testloader = data.DataLoader(testset, batch_size=args.test_batch, shuffle=False, num_workers=args.workers)
# Model
print("==> creating model '{}'".format(args.arch))
if args.arch.startswith('resnext'):
# NOTE(review): call truncated mid-kwargs in this fragment.
model = models.__dict__[args.arch](
cardinality=args.cardinality,
num_classes=num_classes,
depth=args.depth,
# NOTE(review): fragment — opens mid-Compose (the train_transform assignment
# line is missing) and all configuration flows through a dict `d` holding the
# parsed args and loaders.
transforms.RandomCrop(32, padding=4),
transforms.ToTensor(),
transforms.Normalize(mean, std)])
test_transform = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize(mean, std)])
if d['args'].dataset == 'cifar10':
train_data = dset.CIFAR10(d['args'].data_path, train=True,
transform=train_transform, download=True)
test_data = dset.CIFAR10(d['args'].data_path, train=False,
transform=test_transform, download=True)
else:
train_data = dset.CIFAR100(d['args'].data_path, train=True,
transform=train_transform, download=True)
test_data = dset.CIFAR100(d['args'].data_path, train=False,
transform=test_transform, download=True)
# pin_memory only pays off when batches are copied to GPU, hence ngpu gate.
d['train_loader'] = torch.utils.data.DataLoader(train_data,
batch_size=d['args'].batch_size, shuffle=True,
num_workers=d['args'].prefetch,
pin_memory=True if d['args'].ngpu > 0 else False)
d['test_loader'] = torch.utils.data.DataLoader(test_data,
batch_size=d['args'].test_bs, shuffle=False,
num_workers=d['args'].prefetch,
pin_memory=True if d['args'].ngpu > 0 else False)
# Init checkpoints
if not os.path.isdir(d['args'].save):
os.makedirs(d['args'].save)
# make a preemptive forward pass to initialize things
# NOTE(review): this rebinds the name `data` to a batch tensor — it would
# shadow any `data` module/variable used earlier in the full file.
data, _ = next(iter(d['train_loader']))
# NOTE(review): fragment from a noisy-label CIFAR setup — opens mid-Compose
# (CIFAR_MEAN/CIFAR_STD and the cifar100 branch header are above the cut) and
# is truncated inside the cifar10 test_transform at the end.
transforms.ToTensor(),
transforms.Normalize(CIFAR_MEAN, CIFAR_STD)])
test_transform = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize(CIFAR_MEAN, CIFAR_STD)])
# cifar100Nosiy (sic — project-local class) presumably injects label noise at
# rate `nosiy_rate`, symmetric or asymmetric per `asym` — confirm against its
# definition; only clean labels are used for the test set.
train_dataset = cifar100Nosiy(root=self.dataPath,
train=True,
transform=train_transform,
download=True,
asym=self.asym,
seed=self.seed,
nosiy_rate=self.noise_rate)
test_dataset = datasets.CIFAR100(root=self.dataPath,
train=False,
transform=test_transform,
download=True)
elif self.dataset_type == 'cifar10':
# Standard CIFAR-10 per-channel statistics.
CIFAR_MEAN = [0.49139968, 0.48215827, 0.44653124]
CIFAR_STD = [0.24703233, 0.24348505, 0.26158768]
train_transform = transforms.Compose([
transforms.RandomCrop(32, padding=4),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize(CIFAR_MEAN, CIFAR_STD)])
# NOTE(review): Compose truncated here — no Normalize/closing bracket in view.
test_transform = transforms.Compose([
transforms.ToTensor(),
# NOTE(review): DCGAN-style dataset selection fragment — the leading `]))`
# closes a dset call from a branch above the cut; `opt`, `transform` and the
# surrounding if/elif over opt.model come from elsewhere.  `nc` is the input
# channel count fed to the generator/discriminator.
]))
nc = 1
elif opt.model == 'test':
# NOTE(review): BUG — `opt.dataset in 'folder'` is *substring* membership
# ('f', 'old', etc. all match, and unrelated values fall through).  The
# upstream DCGAN code uses `opt.dataset in ['imagenet', 'folder', 'lfw']`;
# this should be `opt.dataset == 'folder'` or a list membership test.
if opt.dataset in 'folder':
# folder dataset
dataset = dset.ImageFolder(root=opt.dataroot,
transform=transform)
nc = 3
elif opt.dataset == 'cifar-10':
# test split (train=False) for the 'test' model mode.
dataset = dset.CIFAR10(root=opt.dataroot,
download=True,
train=False,
transform=transform)
nc = 3
elif opt.dataset == 'cifar-100':
dataset = dset.CIFAR100(root=opt.dataroot,
download=True,
train=False,
transform=transform)
nc = 3
elif opt.dataset == 'mnist':
# NOTE(review): RandomHorizontalFlip inside a *test* pipeline makes
# evaluation non-deterministic, and flipping digits is usually
# undesirable — confirm this augmentation is intended here.
dataset = dset.MNIST(root=opt.dataroot,
download=True,
train=False,
transform=transforms.Compose([
transforms.Resize(opt.imageSize),
transforms.CenterCrop(opt.imageSize),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize((0.5,), (0.5,)),
]))
nc = 1