def _init_param(self, model):
    colorlog.critical("[Init General Parameter] >> xavier_uniform_")
    for p in model.parameters():
        if p.requires_grad:
            # Xavier-uniform for weight matrices, zeros for 1-D parameters (e.g. biases).
            if len(p.shape) > 1:
                nn.init.xavier_uniform_(p)
            else:
                nn.init.constant_(p, 0)
    # Optionally overwrite the word-embedding weights with pretrained vectors.
    if args.pretrained_word_em_dir:
        colorlog.critical("[Pretrained Word em loaded] from {}".format(args.pretrained_word_em_dir))
        word_em = np.load(args.pretrained_word_em_dir)
        model.word_em_weight.data.copy_(torch.from_numpy(word_em))
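For reference, a minimal sketch of the pretrained-embedding path in _init_param: a numpy array with the same shape as the embedding weight is loaded and copied in place. The file name word_em.npy and the vocabulary/embedding sizes below are hypothetical; the actual code takes the path from args.pretrained_word_em_dir.

# Hedged sketch of the embedding-copy pattern; names and sizes are assumptions.
import numpy as np
import torch
import torch.nn as nn

vocab_size, em_dim = 100, 16
embedding = nn.Embedding(vocab_size, em_dim)

# A saved numpy array whose shape matches the embedding weight (vocab_size, em_dim).
np.save("word_em.npy", np.random.randn(vocab_size, em_dim).astype(np.float32))

word_em = np.load("word_em.npy")
# Same call pattern as the snippet: copy the pretrained matrix into the layer weight.
embedding.weight.data.copy_(torch.from_numpy(word_em))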
def _init_meta_param(self, model):
    colorlog.critical("[Init Meta Parameter] >> uniform_ [-0.01, 0.01]")
    for name, param in model.meta_param_manager.state_dict().items():
        colorlog.info("{} initialized".format(name))
        nn.init.uniform_(param, -0.01, 0.01)
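Similarly, a small hedged sketch of the rule in _init_meta_param: iterate a module's state_dict and apply an in-place uniform init. MetaParamManager here is a hypothetical stand-in for model.meta_param_manager, not the repository's actual class.

# Hedged sketch of the meta-parameter init applied to a toy module.
import torch
import torch.nn as nn

class MetaParamManager(nn.Module):
    def __init__(self):
        super().__init__()
        self.task_bias = nn.Parameter(torch.zeros(4))

manager = MetaParamManager()
for name, param in manager.state_dict().items():
    # state_dict values share storage with the parameters, so the in-place
    # uniform_ below updates the module's actual weights.
    nn.init.uniform_(param, -0.01, 0.01)
print(manager.task_bias)  # now filled with values in [-0.01, 0.01]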