How to use the dpgen.generator.lib.utils.make_iter_name function in dpgen

To help you get started, we've selected a few dpgen examples that show popular ways make_iter_name is used in public projects.
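
In dpgen's source, make_iter_name (defined in dpgen/generator/lib/utils.py) turns an integer iteration index into the name of that iteration's working directory; each stage of a run then joins this name with a stage sub-directory such as the training or first-principles (fp) folder. The sketch below only illustrates that convention and is not the dpgen implementation itself; the zero-padding width and the "02.fp" stage name are assumptions based on current sources and may differ between versions.

import os

# Minimal sketch of the naming convention used in the excerpts below.
# The "%06d" padding and the "02.fp" stage name are assumptions for
# illustration; dpgen defines its own iter_format and fp_name constants.
def make_iter_name(iter_index):
    return "iter." + ("%06d" % iter_index)

fp_name = "02.fp"  # assumed stage-directory name, for illustration only
work_path = os.path.join(make_iter_name(2), fp_name)
print(work_path)   # e.g. iter.000002/02.fp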

github deepmodeling/dpgen · dpgen/generator/run.py (View on GitHub)

def post_fp_pwscf (iter_index,
                   jdata):
    model_devi_jobs = jdata['model_devi_jobs']
    assert (iter_index < len(model_devi_jobs))

    iter_name = make_iter_name(iter_index)
    work_path = os.path.join(iter_name, fp_name)
    fp_tasks = glob.glob(os.path.join(work_path, 'task.*'))
    fp_tasks.sort()
    if len(fp_tasks) == 0 :
        return

    system_index = []
    for ii in fp_tasks :
        system_index.append(os.path.basename(ii).split('.')[1])
    system_index.sort()
    set_tmp = set(system_index)
    system_index = list(set_tmp)
    system_index.sort()

    cwd = os.getcwd()
    for ss in system_index :
        ...  # loop body truncated in the original excerpt

github deepmodeling/dpgen · dpgen/generator/run.py (View on GitHub)

def post_fp_gaussian (iter_index,
                      jdata):
    model_devi_jobs = jdata['model_devi_jobs']
    assert (iter_index < len(model_devi_jobs))

    iter_name = make_iter_name(iter_index)
    work_path = os.path.join(iter_name, fp_name)
    fp_tasks = glob.glob(os.path.join(work_path, 'task.*'))
    fp_tasks.sort()
    if len(fp_tasks) == 0 :
        return

    system_index = []
    for ii in fp_tasks :
        system_index.append(os.path.basename(ii).split('.')[1])
    system_index.sort()
    set_tmp = set(system_index)
    system_index = list(set_tmp)
    system_index.sort()

    cwd = os.getcwd()
    for ss in system_index :
        ...  # loop body truncated in the original excerpt

github deepmodeling/dpgen · dpgen/generator/run.py (View on GitHub)

    # (this excerpt begins partway through dpgen's iteration driver)
    max_tasks = 10000
    numb_task = 9
    record = "record.dpgen"
    iter_rec = [0, -1]
    if os.path.isfile (record) :
        with open (record) as frec :
            for line in frec :
                iter_rec = [int(x) for x in line.split()]
        dlog.info ("continue from iter %03d task %02d" % (iter_rec[0], iter_rec[1]))

    cont = True
    ii = -1
    while cont:
        ii += 1
        iter_name=make_iter_name(ii)
        sepline(iter_name,'=')
        for jj in range (numb_task) :
            if ii * max_tasks + jj <= iter_rec[0] * max_tasks + iter_rec[1] :
                continue
            task_name="task %02d"%jj
            sepline("{} {}".format(iter_name, task_name),'-')
            if   jj == 0 :
                log_iter ("make_train", ii, jj)
                make_train (ii, jdata, mdata)
            elif jj == 1 :
                log_iter ("run_train", ii, jj)
                mdata  = decide_train_machine(mdata)
                run_train  (ii, jdata, mdata)
            elif jj == 2 :
                log_iter ("post_train", ii, jj)
                post_train (ii, jdata, mdata)
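
The excerpt stops partway through the elif chain; the remaining per-iteration tasks follow the same pattern. The resume mechanism is the comparison against iter_rec read from record.dpgen: a task (ii, jj) is skipped when it falls at or before the last recorded (iteration, task) pair. The snippet below is a standalone illustration of that test, not dpgen code, and assumes a record file whose last line is "2 4".

# Illustration of the resume test used above (assumed last record line "2 4"):
max_tasks = 10000
iter_rec = [2, 4]            # last line of record.dpgen: iteration 2, task 4

def already_done(ii, jj):
    return ii * max_tasks + jj <= iter_rec[0] * max_tasks + iter_rec[1]

print(already_done(2, 4))    # True  -> already finished, skipped
print(already_done(2, 5))    # False -> work resumes here
print(already_done(3, 0))    # False -> later iterations run in full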

github deepmodeling/dpgen · dpgen/generator/run.py (View on GitHub)

def post_train (iter_index,
                jdata,
                mdata) :
    # load json param
    numb_models = jdata['numb_models']
    # paths
    iter_name = make_iter_name(iter_index)
    work_path = os.path.join(iter_name, train_name)
    # check if is copied
    copy_flag = os.path.join(work_path, 'copied')
    if os.path.isfile(copy_flag) :
        log_task('copied model, do not post train')
        return
    # symlink models
    for ii in range(numb_models) :
        task_file = os.path.join(train_task_fmt % ii, 'frozen_model.pb')
        ofile = os.path.join(work_path, 'graph.%03d.pb' % ii)
        if os.path.isfile(ofile) :
            os.remove(ofile)
        os.symlink(task_file, ofile)

github deepmodeling/dpgen · dpgen/simplify/simplify.py (View on GitHub)

def make_fp(iter_index, jdata, mdata):
    iter_name = make_iter_name(iter_index)
    work_path = os.path.join(iter_name, fp_name)
    create_path(work_path)
    picked_data_path = os.path.join(iter_name, model_devi_name, picked_data_name)
    if jdata.get("labeled", False):
        dlog.info("already labeled, skip make_fp and link data directly")
        os.symlink(os.path.abspath(picked_data_path), os.path.abspath(
            os.path.join(work_path, "task.%03d" % 0)))
        os.symlink(os.path.abspath(picked_data_path), os.path.abspath(
            os.path.join(work_path, "data.%03d" % 0)))
        return
    systems = get_systems(picked_data_path, jdata)
    fp_style = jdata['fp_style']
    if 'user_fp_params' in jdata.keys() :
        fp_params = jdata['user_fp_params']
    else:
        fp_params = jdata['fp_params']
    # ... (remainder of make_fp truncated in the original excerpt)

github deepmodeling/dpgen · dpgen/generator/run.py (View on GitHub)

def run_fp_inner (iter_index,
                  jdata,
                  mdata,
                  forward_files,
                  backward_files,
                  check_fin,
                  log_file = "fp.log",
                  forward_common_files=[]) :
    fp_command = mdata['fp_command']
    fp_group_size = mdata['fp_group_size']
    fp_resources = mdata['fp_resources']
    mark_failure = fp_resources.get('mark_failure', False)

    iter_name = make_iter_name(iter_index)
    work_path = os.path.join(iter_name, fp_name)

    fp_tasks = glob.glob(os.path.join(work_path, 'task.*'))
    fp_tasks.sort()
    if len(fp_tasks) == 0 :
        return

    fp_run_tasks = fp_tasks
    # for ii in fp_tasks :
    #     if not check_fin(ii) :
    #         fp_run_tasks.append(ii)
    run_tasks = [os.path.basename(ii) for ii in fp_run_tasks]
    dispatcher = make_dispatcher(mdata['fp_machine'], mdata['fp_resources'], work_path, run_tasks, fp_group_size)
    dispatcher.run_jobs(mdata['fp_resources'],
                        [fp_command],
                        work_path,
                        # ... (remaining arguments truncated in the original excerpt)

github deepmodeling/dpgen · dpgen/generator/run.py (View on GitHub)

def make_fp_pwscf(iter_index,
                  jdata) :
    # make config
    fp_tasks = _make_fp_vasp_configs(iter_index, jdata)
    if len(fp_tasks) == 0 :
        return
    # make pwscf input
    iter_name = make_iter_name(iter_index)
    work_path = os.path.join(iter_name, fp_name)
    fp_pp_files = jdata['fp_pp_files']
    if 'user_fp_params' in jdata.keys() :
        fp_params = jdata['user_fp_params']
        user_input = True
    else:
        fp_params = jdata['fp_params']
        user_input = False
    cwd = os.getcwd()
    for ii in fp_tasks:
        os.chdir(ii)
        sys_data = dpdata.System('POSCAR').data
        sys_data['atom_masses'] = jdata['mass_map']
        ret = make_pwscf_input(sys_data, fp_pp_files, fp_params, user_input = user_input)
        with open('input', 'w') as fp:
            fp.write(ret)

github deepmodeling/dpgen · dpgen/simplify/simplify.py (View on GitHub)

def init_pick(iter_index, jdata, mdata):
    """pick up init data from dataset randomly"""
    pick_data = jdata['pick_data']
    init_pick_number = jdata['init_pick_number']
    # use MultiSystems with System
    # TODO: support System and LabeledSystem
    # TODO: support other format
    systems = get_systems(pick_data, jdata)
    # label the system
    labels = []
    for key, system in systems.systems.items():
        labels.extend([(key, j) for j in range(len(system))])

    # random pick
    iter_name = make_iter_name(iter_index)
    create_path(iter_name)
    work_path = os.path.join(iter_name, model_devi_name)
    create_path(work_path)
    idx = np.arange(len(labels))
    np.random.shuffle(idx)
    pick_idx = idx[:init_pick_number]
    rest_idx = idx[init_pick_number:]

    # dump the init data
    picked_systems = dpdata.MultiSystems()
    for j in pick_idx:
        sys_name, sys_id = labels[j]
        picked_systems.append(systems[sys_name][sys_id])
    sys_data_path = os.path.join(work_path, picked_data_name)

    picked_systems.to_deepmd_raw(sys_data_path)

github deepmodeling/dpgen · dpgen/generator/run.py (View on GitHub)

    # (this excerpt begins partway through dpgen's training step)
    if training_reuse_iter is not None and iter_index >= training_reuse_iter:
        training_init_model = True
    try:
        mdata["deepmd_version"]
    except KeyError:
        mdata = set_version(mdata)
    if LooseVersion(mdata["deepmd_version"]) < LooseVersion('1'):
        # 0.x
        deepmd_path = mdata['deepmd_path']
    else:
        # 1.x
        train_command = mdata.get('train_command', 'dp')
    train_resources = mdata['train_resources']

    # paths
    iter_name = make_iter_name(iter_index)
    work_path = os.path.join(iter_name, train_name)
    # check if is copied
    copy_flag = os.path.join(work_path, 'copied')
    if os.path.isfile(copy_flag) :
        log_task('copied model, do not train')
        return
    # make tasks
    all_task = []
    for ii in range(numb_models) :
        task_path = os.path.join(work_path, train_task_fmt % ii)
        all_task.append(task_path)
    commands = []
    if LooseVersion(mdata["deepmd_version"]) < LooseVersion('1'):
        # 0.x
        command = os.path.join(deepmd_path, 'bin/dp_train %s' % train_input_file)
        commands.append(command)
    # ... (remaining branch truncated in the original excerpt)

github deepmodeling/dpgen · dpgen/generator/run.py (View on GitHub)

def copy_model(numb_model, prv_iter_index, cur_iter_index) :
    cwd=os.getcwd()
    prv_train_path = os.path.join(make_iter_name(prv_iter_index), train_name)
    cur_train_path = os.path.join(make_iter_name(cur_iter_index), train_name)
    prv_train_path = os.path.abspath(prv_train_path)
    cur_train_path = os.path.abspath(cur_train_path)
    create_path(cur_train_path)
    for ii in range(numb_model) :
        prv_train_task = os.path.join(prv_train_path, train_task_fmt%ii)
        os.chdir(cur_train_path)
        os.symlink(os.path.relpath(prv_train_task), train_task_fmt%ii)
        os.symlink(os.path.join(train_task_fmt%ii, 'frozen_model.pb'), 'graph.%03d.pb' % ii)
        os.chdir(cwd)
    # leave an empty "copied" marker file for later stages to detect
    with open(os.path.join(cur_train_path, "copied"), 'w') as fp:
        None
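
Taken together, these functions give every iteration a predictable layout: make_iter_name produces the top-level iteration directory, each stage lives in a fixed sub-directory beneath it, and post_train or copy_model expose the trained networks as graph.*.pb symlinks. The sketch below is a hedged illustration of how that layout can be enumerated after a run; it is not part of dpgen's API, and the "00.train" stage name and the six-digit iteration padding are assumptions based on current sources.

import glob
import os

# Sketch: collect the frozen-model symlinks created by post_train/copy_model.
# "00.train" (dpgen's train_name) and the six-digit iteration padding are
# assumptions here, not guaranteed by dpgen's public interface.
def list_frozen_models(root="."):
    pattern = os.path.join(root, "iter.??????", "00.train", "graph.*.pb")
    return sorted(glob.glob(pattern))

for model in list_frozen_models():
    print(model)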