import torch.nn as nn
from torch.hub import load_state_dict_from_url  # the original module may import this helper from a different location

# IResNet, IBasicBlock and model_urls are defined elsewhere in the same module.
def _iresnet(arch, block, layers, pretrained, progress, **kwargs):
    model = IResNet(block, layers, **kwargs)
    if pretrained:
        state_dict = load_state_dict_from_url(model_urls[arch],
                                              progress=progress)
        model.load_state_dict(state_dict)
    return model
def __init__(self, block, layers, num_features=512, zero_init_residual=False,
             groups=1, width_per_group=64, replace_stride_with_dilation=None):
    # constructor of the IResNet backbone (method of the IResNet class)
    super(IResNet, self).__init__()
    self.inplanes = 64
    self.dilation = 1
    if replace_stride_with_dilation is None:
        # each element in the tuple indicates if we should replace
        # the 2x2 stride with a dilated convolution instead
        replace_stride_with_dilation = [False, False, False]
    if len(replace_stride_with_dilation) != 3:
        raise ValueError("replace_stride_with_dilation should be None "
                         "or a 3-element tuple, got {}".format(replace_stride_with_dilation))
    self.groups = groups
    self.base_width = width_per_group
    # stem: 3x3 stride-1 convolution (standard ResNet uses a 7x7 stride-2 stem)
    self.conv1 = nn.Conv2d(3, self.inplanes, kernel_size=3, stride=1, padding=1,
                           bias=False)
    self.bn1 = nn.BatchNorm2d(self.inplanes, eps=2e-05, momentum=0.9)
    self.prelu = nn.PReLU(self.inplanes)
def iresnet34(pretrained=False, progress=True, **kwargs):
    # 34-layer IResNet: IBasicBlock with the [3, 4, 6, 3] stage configuration
    return _iresnet('iresnet34', IBasicBlock, [3, 4, 6, 3], pretrained, progress,
                    **kwargs)
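# Usage sketch (added for illustration, not part of the original snippets): with
# IResNet and IBasicBlock defined as above, the factory builds a randomly
# initialized 34-layer backbone. The 112x112 input size is an assumption (the
# usual aligned-face crop for ArcFace-style models).
import torch

net = iresnet34(pretrained=False, num_features=512)
net.eval()
with torch.no_grad():
    dummy_face = torch.randn(1, 3, 112, 112)   # batch of one aligned face crop
    embedding = net(dummy_face)                # expected shape: (1, 512), i.e. num_features
print(embedding.shape)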
def __init__(self, inplanes, planes, stride=1, downsample=None, groups=1,
             base_width=64, dilation=1):
    # constructor of the IBasicBlock residual block
    super(IBasicBlock, self).__init__()
    if groups != 1 or base_width != 64:
        raise ValueError('BasicBlock only supports groups=1 and base_width=64')
    if dilation > 1:
        raise NotImplementedError("Dilation > 1 not supported in BasicBlock")
    # self.conv2 (and self.downsample, when set) downsample the input when stride != 1
    self.bn1 = nn.BatchNorm2d(inplanes, eps=2e-05, momentum=0.9)
    self.conv1 = conv3x3(inplanes, planes)   # conv3x3 is a helper defined elsewhere in the module
    self.bn2 = nn.BatchNorm2d(planes, eps=2e-05, momentum=0.9)
    self.prelu = nn.PReLU(planes)
    self.conv2 = conv3x3(planes, planes, stride)
    self.bn3 = nn.BatchNorm2d(planes, eps=2e-05, momentum=0.9)
    self.downsample = downsample
    self.stride = stride
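# The snippet above only shows __init__; the forward pass below is a sketch of how
# these layers are typically chained in a pre-activation residual block
# (BN -> conv -> BN -> PReLU -> conv -> BN, with an optional downsample on the
# identity path). The exact ordering in the original project may differ.
def forward(self, x):
    identity = x
    out = self.bn1(x)
    out = self.conv1(out)
    out = self.bn2(out)
    out = self.prelu(out)
    out = self.conv2(out)
    out = self.bn3(out)
    if self.downsample is not None:
        identity = self.downsample(x)
    return out + identity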
    # continuation of IResNet.__init__: embedding head and weight initialization
    self.bn2 = nn.BatchNorm2d(512 * block.expansion, eps=2e-05, momentum=0.9)
    self.dropout = nn.Dropout2d(p=0.4, inplace=True)
    self.fc = nn.Linear(512 * block.expansion * self.fc_scale, num_features)
    self.features = nn.BatchNorm1d(num_features, eps=2e-05, momentum=0.9)

    for m in self.modules():
        if isinstance(m, nn.Conv2d):
            nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
        elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
            nn.init.constant_(m.weight, 1)
            nn.init.constant_(m.bias, 0)

    # optionally zero the bn2 scale in each IBasicBlock so the residual branch starts near zero
    if zero_init_residual:
        for m in self.modules():
            if isinstance(m, IBasicBlock):
                nn.init.constant_(m.bn2.weight, 0)
mtcnn_model_name="Onet",
factor=0.79,
min_face_size=10,
threshold=[0.8,0.8,0.6]):
model=[None,None,None]
if(mtcnn_model_name in ["Pnet","Rnet","Onet"]):
model[0]=MTCNN_model.Pnet_model
if(mtcnn_model_name in ["Rnet","Onet"]):
model[1]=MTCNN_model.Rnet_model
if(mtcnn_model_name=="Onet"):
model[2]=MTCNN_model.Onet_model
self.img_size_list = image_size
self.face_detector = mtcnn_detector.MTCNN_Detector(model,mtcnn_model_path,batch_size,factor,min_face_size,threshold)
self.recognizer = recognizer.Recognizer(arc_model_name, arc_model_path, size_to_predict, self.img_size_list)
self.image_size = str(image_size[0]) + "," + str(image_size[1])
self.database = database
db = pymysql.connect(host=host, user=user, password=password, port=port, charset="utf8" )
self.cursor = db.cursor()
self.cursor.execute("USE %s;"%(database))
self.cursor.execute("ALTER DATABASE %s character SET gbk;"%(database))