    # frame-style electron temperature (te_f) when use_ele_temp == 1, atom-style
    # (te_a) otherwise; the trailing branch handles runs without electron temperature
    if use_ele_temp == 1:
        te_f = tt
        te_a = None
    else:
        te_f = None
        te_a = tt
else:
    tt = tt_
    te_f = None
    te_a = None
for pp in press:
    task_name = make_model_devi_task_name(sys_idx[sys_counter], task_counter)
    conf_name = make_model_devi_conf_name(sys_idx[sys_counter], conf_counter) + '.lmp'
    task_path = os.path.join(work_path, task_name)
    # dlog.info(task_path)
    create_path(task_path)
    create_path(os.path.join(task_path, 'traj'))
    loc_conf_name = 'conf.lmp'
    os.symlink(os.path.join(os.path.join('..', 'confs'), conf_name),
               os.path.join(task_path, loc_conf_name))
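    # each task directory now holds a conf.lmp symlink to the shared
    # configuration plus an empty traj/ subdirectory for the trajectory dumps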
    cwd_ = os.getcwd()
    os.chdir(task_path)
    try:
        mdata["deepmd_version"]
    except KeyError:
        mdata = set_version(mdata)
    deepmd_version = mdata['deepmd_version']
    file_c = make_lammps_input(ensemble,
                               loc_conf_name,
                               task_model_list,
                               nsteps,
                               model_devi_dt,

def _make_fp_vasp_configs(iter_index,
                          jdata):
    fp_task_max = jdata['fp_task_max']
    model_devi_skip = jdata['model_devi_skip']
    # the energy trust levels are set to a huge value, so candidate selection
    # below effectively relies on the force model deviation only
    e_trust_lo = 1e+10
    e_trust_hi = 1e+10
    f_trust_lo = jdata['model_devi_f_trust_lo']
    f_trust_hi = jdata['model_devi_f_trust_hi']
    type_map = jdata['type_map']
    iter_name = make_iter_name(iter_index)
    work_path = os.path.join(iter_name, fp_name)
    create_path(work_path)
    modd_path = os.path.join(iter_name, model_devi_name)
    task_min = -1
    if os.path.isfile(os.path.join(modd_path, 'cur_job.json')):
        cur_job = json.load(open(os.path.join(modd_path, 'cur_job.json'), 'r'))
        if 'task_min' in cur_job:
            task_min = cur_job['task_min']
    # make configs
    fp_tasks = _make_fp_vasp_inner(modd_path, work_path,
                                   model_devi_skip,
                                   e_trust_lo, e_trust_hi,
                                   f_trust_lo, f_trust_hi,
                                   task_min, fp_task_max,
                                   [],
                                   type_map,

    return
elif iter_index > 0 and _check_skip_train(model_devi_jobs[iter_index - 1]):
    log_task('skip training at step %d ' % (iter_index - 1))
    copy_model(numb_models, iter_index - 1, iter_index)
    return
else:
    iter_name = make_iter_name(iter_index)
    work_path = os.path.join(iter_name, train_name)
    copy_flag = os.path.join(work_path, 'copied')
    if os.path.isfile(copy_flag):
        os.remove(copy_flag)
# establish work path
iter_name = make_iter_name(iter_index)
work_path = os.path.join(iter_name, train_name)
create_path(work_path)
# link init data
cwd = os.getcwd()
os.chdir(work_path)
os.symlink(os.path.abspath(init_data_prefix), 'data.init')
# link iter data
os.mkdir('data.iters')
os.chdir('data.iters')
for ii in range(iter_index):
    os.symlink(os.path.relpath(os.path.join(cwd, make_iter_name(ii))), make_iter_name(ii))
os.chdir(cwd)
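# After this the training work path looks roughly like (assuming the usual
# iter.NNNNNN naming and two finished iterations):
#   iter.000002/<train dir>/data.init                 -> <init_data_prefix>
#   iter.000002/<train dir>/data.iters/iter.000000    -> ../../../iter.000000
#   iter.000002/<train dir>/data.iters/iter.000001    -> ../../../iter.000001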
init_data_sys = []
init_batch_size = []
if 'init_batch_size' in jdata:
    init_batch_size_ = list(jdata['init_batch_size'])
    if len(init_data_sys_) > len(init_batch_size_):
        raise RuntimeError('the length of init_batch_size is smaller than that of init_data_sys')
# set training reuse model
if training_reuse_iter is not None and iter_index >= training_reuse_iter:
    jinput['training']['auto_prob_style'] \
        = "prob_sys_size; 0:%d:%f; %d:%d:%f" \
        % (old_range, training_reuse_old_ratio, old_range, len(init_data_sys), 1. - training_reuse_old_ratio)
    if jinput['loss'].get('start_pref_e') is not None:
        jinput['loss']['start_pref_e'] = training_reuse_start_pref_e
    if jinput['loss'].get('start_pref_f') is not None:
        jinput['loss']['start_pref_f'] = training_reuse_start_pref_f
    jinput['learning_rate']['start_lr'] = training_reuse_start_lr
    jinput['training']['stop_batch'] = training_reuse_stop_batch
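    # for example (hypothetical values): with old_range = 3, eight init systems
    # and training_reuse_old_ratio = 0.2, auto_prob_style becomes
    # "prob_sys_size; 0:3:0.200000; 3:8:0.800000", i.e. the previously used
    # systems share 20% of the sampling probability and the new data 80%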
for ii in range(numb_models):
    task_path = os.path.join(work_path, train_task_fmt % ii)
    create_path(task_path)
    os.chdir(task_path)
    for jj in init_data_sys:
        if not os.path.isdir(jj):
            raise RuntimeError("data sys %s does not exist, cwd is %s" % (jj, os.getcwd()))
    os.chdir(cwd)
    # set random seed for each model
    if LooseVersion(mdata["deepmd_version"]) < LooseVersion('1'):
        # 0.x
        jinput['seed'] = random.randrange(sys.maxsize) % (2**32)
    else:
        # 1.x
        jinput['model']['descriptor']['seed'] = random.randrange(sys.maxsize) % (2**32)
        jinput['model']['fitting_net']['seed'] = random.randrange(sys.maxsize) % (2**32)
        jinput['training']['seed'] = random.randrange(sys.maxsize) % (2**32)
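    # the seeds are reduced modulo 2**32 so they fit into 32 bits; drawing them
    # per model lets each ensemble member start from a different initialization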
    # set model activation function
    if model_devi_activation_func is not None:

def init_pick(iter_index, jdata, mdata):
    """Randomly pick the initial training data from the dataset."""
    pick_data = jdata['pick_data']
    init_pick_number = jdata['init_pick_number']
    # use MultiSystems with System
    # TODO: support System and LabeledSystem
    # TODO: support other formats
    systems = get_systems(pick_data, jdata)
    # label the systems
    labels = []
    for key, system in systems.systems.items():
        labels.extend([(key, j) for j in range(len(system))])
    # random pick
    iter_name = make_iter_name(iter_index)
    create_path(iter_name)
    work_path = os.path.join(iter_name, model_devi_name)
    create_path(work_path)
    idx = np.arange(len(labels))
    np.random.shuffle(idx)
    pick_idx = idx[:init_pick_number]
    rest_idx = idx[init_pick_number:]
    # dump the init data
    picked_systems = dpdata.MultiSystems()
    for j in pick_idx:
        sys_name, sys_id = labels[j]
        picked_systems.append(systems[sys_name][sys_id])
    sys_data_path = os.path.join(work_path, picked_data_name)
    picked_systems.to_deepmd_raw(sys_data_path)
    picked_systems.to_deepmd_npy(sys_data_path, set_size=init_pick_number)
    # link the picked data as data.000 under the work path
    os.symlink(os.path.abspath(picked_data_path), os.path.abspath(
        os.path.join(work_path, "data.%03d" % 0)))
    return
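
# A minimal usage sketch (hypothetical paths and settings; only the keys read
# above are shown):
#
#     jdata = {'pick_data': '/path/to/dataset', 'init_pick_number': 100}
#     mdata = {}
#     init_pick(0, jdata, mdata)
#
# This creates the iteration directory (iter.000000 with the usual naming),
# dumps the randomly picked frames there in deepmd raw/npy format, and links
# them as data.000.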

systems = get_systems(picked_data_path, jdata)
fp_style = jdata['fp_style']
if 'user_fp_params' in jdata:
    fp_params = jdata['user_fp_params']
else:
    fp_params = jdata['fp_params']
jj = 0
for system in systems:
    for subsys in system:
        sys_data = subsys.data
        task_name = "task.%03d.%06d" % (0, jj)
        task_path = os.path.join(work_path, task_name)
        create_path(task_path)
        if fp_style == "gaussian":
            ret = make_gaussian_input(sys_data, fp_params)
            with open(os.path.join(task_path, 'input'), 'w') as fp:
                fp.write(ret)
        else:
            # TODO: support other formats
            raise RuntimeError("unsupported fp style")
        jj += 1
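# The loop above leaves one directory per frame, named task.000.000000,
# task.000.000001, ..., each containing the generated Gaussian 'input' file
# when fp_style is "gaussian".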
# link the model
train_path = os.path.join(iter_name, train_name)
train_path = os.path.abspath(train_path)
models = glob.glob(os.path.join(train_path, "graph*pb"))
for mm in models:
    model_name = os.path.basename(mm)
    os.symlink(mm, os.path.join(work_path, model_name))
# link the last rest data
last_iter_name = make_iter_name(iter_index - 1)
rest_data_path = os.path.join(last_iter_name, model_devi_name, rest_data_name)
if not os.path.exists(rest_data_path):
    return False
for jj, subsystem in enumerate(os.listdir(rest_data_path)):
    task_name = "task.%03d.%06d" % (0, jj)
    task_path = os.path.join(work_path, task_name)
    create_path(task_path)
    os.symlink(os.path.abspath(os.path.join(rest_data_path, subsystem)),
               os.path.abspath(os.path.join(task_path, rest_data_name)))
return True
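# A False return signals that the previous iteration left no rest data to
# process; True means every remaining subsystem has been linked into its own
# task directory.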

# fall back to set_version() when no deepmd_version is recorded in mdata
try:
    mdata["deepmd_version"]
except KeyError:
    mdata = set_version(mdata)
deepmd_version = mdata['deepmd_version']
sys_counter = 0
for ss in conf_systems:
    conf_counter = 0
    task_counter = 0
    for cc in ss:
        for ii in range(len(rev_mat)):
            rev_item = rev_mat[ii]
            task_name = make_model_devi_task_name(sys_idx[sys_counter], task_counter)
            conf_name = make_model_devi_conf_name(sys_idx[sys_counter], conf_counter) + '.lmp'
            task_path = os.path.join(work_path, task_name)
            # create task path
            create_path(task_path)
            create_path(os.path.join(task_path, 'traj'))
            # link conf
            loc_conf_name = 'conf.lmp'
            os.symlink(os.path.join(os.path.join('..', 'confs'), conf_name),
                       os.path.join(task_path, loc_conf_name))
            cwd_ = os.getcwd()
            # chdir to task path
            os.chdir(task_path)
            shutil.copyfile(lmp_templ, 'input.lammps')
            # revise input of lammps
            with open('input.lammps') as fp:
                lmp_lines = fp.readlines()
            lmp_lines = revise_lmp_input_model(lmp_lines, task_model_list, trj_freq, deepmd_version=deepmd_version)
            lmp_lines = revise_lmp_input_dump(lmp_lines, trj_freq)
            lmp_lines = revise_by_keys(lmp_lines, rev_keys[:num_lmp], rev_item[:num_lmp])
            # revise input of plumed