# NOTE: the following line is a code-scanner advertisement banner injected by the
# hosting site during extraction; it is not part of the original source file.
# "Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately."
"no_handler": []
}
vasp_cmd = env_chk(self["vasp_cmd"], fw_spec)
if isinstance(vasp_cmd, str):
vasp_cmd = os.path.expandvars(vasp_cmd)
vasp_cmd = shlex.split(vasp_cmd)
# initialize variables
job_type = self.get("job_type", "normal")
scratch_dir = env_chk(self.get("scratch_dir"), fw_spec)
gzip_output = self.get("gzip_output", True)
max_errors = self.get("max_errors", CUSTODIAN_MAX_ERRORS)
auto_npar = env_chk(self.get("auto_npar"), fw_spec, strict=False, default=False)
gamma_vasp_cmd = env_chk(self.get("gamma_vasp_cmd"), fw_spec, strict=False, default=None)
if gamma_vasp_cmd:
gamma_vasp_cmd = shlex.split(gamma_vasp_cmd)
# construct jobs
if job_type == "normal":
jobs = [VaspJob(vasp_cmd, auto_npar=auto_npar, gamma_vasp_cmd=gamma_vasp_cmd)]
elif job_type == "double_relaxation_run":
jobs = VaspJob.double_relaxation_run(vasp_cmd, auto_npar=auto_npar,
ediffg=self.get("ediffg"),
half_kpts_first_relax=self.get("half_kpts_first_relax", HALF_KPOINTS_FIRST_RELAX))
elif job_type == "metagga_opt_run":
jobs = VaspJob.metagga_opt_run(vasp_cmd, auto_npar=auto_npar,
ediffg=self.get("ediffg"),
half_kpts_first_relax=self.get("half_kpts_first_relax", HALF_KPOINTS_FIRST_RELAX))
elif job_type == "full_opt_run":
"no_handler": []
}
vasp_cmd = env_chk(self["vasp_cmd"], fw_spec)
if isinstance(vasp_cmd, six.string_types):
vasp_cmd = os.path.expandvars(vasp_cmd)
vasp_cmd = shlex.split(vasp_cmd)
# initialize variables
job_type = self.get("job_type", "normal")
scratch_dir = env_chk(self.get("scratch_dir"), fw_spec)
gzip_output = self.get("gzip_output", True)
max_errors = self.get("max_errors", 5)
auto_npar = env_chk(self.get("auto_npar"), fw_spec, strict=False, default=False)
gamma_vasp_cmd = env_chk(self.get("gamma_vasp_cmd"), fw_spec, strict=False, default=None)
if gamma_vasp_cmd:
gamma_vasp_cmd = shlex.split(gamma_vasp_cmd)
# construct jobs
if job_type == "normal":
jobs = [VaspJob(vasp_cmd, auto_npar=auto_npar, gamma_vasp_cmd=gamma_vasp_cmd)]
elif job_type == "double_relaxation_run":
jobs = VaspJob.double_relaxation_run(vasp_cmd, auto_npar=auto_npar,
ediffg=self.get("ediffg",-0.05),
half_kpts_first_relax=self.get("half_kpts_first_relax",True))
elif job_type == "full_opt_run":
jobs = VaspJob.full_opt_run(vasp_cmd, auto_npar=auto_npar,
ediffg=self.get("ediffg",-0.05),
max_steps=9,
half_kpts_first_relax=self.get("half_kpts_first_relax", True))
elif job_type == "neb":
def run_task(self, fw_spec):
handler_groups = {
"default": [VaspErrorHandler(), MeshSymmetryErrorHandler(), UnconvergedErrorHandler(),
NonConvergingErrorHandler(),PotimErrorHandler(),
PositiveEnergyErrorHandler(), FrozenJobErrorHandler(), StdErrHandler()],
"strict": [VaspErrorHandler(), MeshSymmetryErrorHandler(), UnconvergedErrorHandler(),
NonConvergingErrorHandler(),PotimErrorHandler(),
PositiveEnergyErrorHandler(), FrozenJobErrorHandler(),
StdErrHandler(), AliasingErrorHandler()],
"md": [VaspErrorHandler(), NonConvergingErrorHandler()],
"no_handler": []
}
vasp_cmd = env_chk(self["vasp_cmd"], fw_spec)
if isinstance(vasp_cmd, six.string_types):
vasp_cmd = os.path.expandvars(vasp_cmd)
vasp_cmd = shlex.split(vasp_cmd)
# initialize variables
job_type = self.get("job_type", "normal")
scratch_dir = env_chk(self.get("scratch_dir"), fw_spec)
gzip_output = self.get("gzip_output", True)
max_errors = self.get("max_errors", 5)
auto_npar = env_chk(self.get("auto_npar"), fw_spec, strict=False, default=False)
gamma_vasp_cmd = env_chk(self.get("gamma_vasp_cmd"), fw_spec, strict=False, default=None)
if gamma_vasp_cmd:
gamma_vasp_cmd = shlex.split(gamma_vasp_cmd)
# construct jobs
def run_task(self, fw_spec):
handler_groups = {
"default": [VaspErrorHandler(), MeshSymmetryErrorHandler(), UnconvergedErrorHandler(),
NonConvergingErrorHandler(),PotimErrorHandler(),
PositiveEnergyErrorHandler(), FrozenJobErrorHandler(), StdErrHandler()],
"strict": [VaspErrorHandler(), MeshSymmetryErrorHandler(), UnconvergedErrorHandler(),
NonConvergingErrorHandler(),PotimErrorHandler(),
PositiveEnergyErrorHandler(), FrozenJobErrorHandler(),
StdErrHandler(), AliasingErrorHandler(), DriftErrorHandler()],
"md": [VaspErrorHandler(), NonConvergingErrorHandler()],
"no_handler": []
}
vasp_cmd = env_chk(self["vasp_cmd"], fw_spec)
if isinstance(vasp_cmd, str):
vasp_cmd = os.path.expandvars(vasp_cmd)
vasp_cmd = shlex.split(vasp_cmd)
# initialize variables
job_type = self.get("job_type", "normal")
scratch_dir = env_chk(self.get("scratch_dir"), fw_spec)
gzip_output = self.get("gzip_output", True)
max_errors = self.get("max_errors", CUSTODIAN_MAX_ERRORS)
auto_npar = env_chk(self.get("auto_npar"), fw_spec, strict=False, default=False)
gamma_vasp_cmd = env_chk(self.get("gamma_vasp_cmd"), fw_spec, strict=False, default=None)
if gamma_vasp_cmd:
gamma_vasp_cmd = shlex.split(gamma_vasp_cmd)
# construct jobs
def run_task(self, fw_spec):
    """Apply the task's modifications to an INCAR file.

    Reads the INCAR named by ``input_filename`` (default ``"INCAR"``) and
    applies, in order: a plain dict update (``incar_update``), key-wise
    multiplication of existing values (``incar_multiply``), and a
    DictMod-style modification (``incar_dictmod``).  Each spec is resolved
    through ``env_chk`` so it may come from the worker environment.  The
    result is written to ``output_filename`` (default ``"INCAR"``).
    """
    incar = Incar.from_file(self.get("input_filename", "INCAR"))

    # Resolve each (optional) modification spec via env_chk.
    update_spec = env_chk(self.get("incar_update"), fw_spec)
    multiply_spec = env_chk(self.get("incar_multiply"), fw_spec)
    dictmod_spec = env_chk(self.get("incar_dictmod"), fw_spec)

    if update_spec:
        incar.update(update_spec)
    if multiply_spec:
        for key, factor in multiply_spec.items():
            incar[key] = incar[key] * factor
    if dictmod_spec:
        apply_mod(dictmod_spec, incar)

    incar.write_file(self.get("output_filename", "INCAR"))
def run_task(self, fw_spec):
    """Assemble per-run MD task documents into one trajectory document.

    Finds all task documents whose label contains ``run_<tag_id>``, orders
    them by their run number, drops exact-duplicate run numbers (repeated
    insertions of the same run), builds a trajectory document from the
    survivors and stores it, replacing any trajectory previously saved for
    this ``tag_id``.
    """
    notes = self.get('notes', None)
    tag_id = self['tag_id']

    # get the database connection (db_file may be resolved from the worker env)
    db_file = env_chk(self.get('db_file'), fw_spec)
    mmdb = VaspMDCalcDb.from_db_file(db_file, admin=True)

    # Replace, not append to, any trajectory previously stored for this tag.
    mmdb.db.trajectories.find_one_and_delete({"runs_label": tag_id})

    # Raw string so "\d" is a regex digit class, not a (deprecated) string
    # escape; compiled once instead of once per use.
    run_number = re.compile(r'run[_-](\d+)')
    runs = mmdb.db['tasks'].find(
        {"task_label": re.compile(f'.*run_{tag_id}.*')})
    runs_sorted = sorted(
        runs, key=lambda x: int(run_number.findall(x['task_label'])[0]))

    # Remove duplicates of the same run (if they exist): a run whose number
    # equals its predecessor's is a duplicate insertion.
    nums = np.array(
        [int(run_number.findall(r['task_label'])[0]) for r in runs_sorted])
    duplicates = set(np.where((nums - np.roll(nums, 1)) == 0)[0])
    runs_sorted = [run for i, run in enumerate(runs_sorted) if i not in duplicates]

    trajectory_doc = runs_to_trajectory_doc(runs_sorted, db_file, tag_id, notes)
    mmdb.db.trajectories.insert_one(trajectory_doc)
def run_task(self, fw_spec):
lobster_cmd = env_chk(self.get("lobster_cmd"), fw_spec)
gzip_output = self.get("gzip_output", True)
gzip_WAVECAR = self.get("gzip_WAVECAR", False)
if gzip_WAVECAR:
add_files_to_gzip = VASP_OUTPUT_FILES
else:
add_files_to_gzip = [f for f in VASP_OUTPUT_FILES if f not in ["WAVECAR"]]
handler_groups = {"default": [], "no_handler": []}
validator_groups = {
"default": [
LobsterFilesValidator(),
EnoughBandsValidator(output_filename="lobsterout"),
],
"strict": [
ChargeSpillingValidator(output_filename="lobsterout"),
LobsterFilesValidator(),
def run_task(self, fw_spec):
    """Run a Q-Chem command directly through the shell.

    The scratch directory is resolved via ``env_chk``; if none is
    configured, a shared-memory default is used.  ``QCSCRATCH`` is exported
    so the spawned Q-Chem process inherits it, then ``qchem_cmd`` is
    executed and its return code logged.
    """
    cmd = self.get("qchem_cmd")
    scratch_dir = env_chk(self.get("scratch_dir"), fw_spec)
    # "is None" (identity), not "== None": only an unset value triggers the
    # default, and it avoids invoking any custom __eq__.
    if scratch_dir is None:
        scratch_dir = "/dev/shm/qcscratch/"
    # Assigning through os.environ (rather than os.putenv) keeps Python's
    # view of the environment consistent with what the child inherits.
    os.environ["QCSCRATCH"] = scratch_dir

    logger.info("Running command: {}".format(cmd))
    # SECURITY: shell=True executes `cmd` through the shell; the command must
    # come from trusted firework specs only, never from untrusted input.
    return_code = subprocess.call(cmd, shell=True)
    logger.info("Command {} finished running with return code: {}".format(
        cmd, return_code))
def run_task(self, fw_spec):
    """Record this calculation's location for downstream fireworks.

    Appends an entry with this task's name, an env_chk-resolved filesystem,
    and a path (default: the current working directory) to the spec's
    ``calc_locs`` list, and pushes the updated list via ``_push_all``.
    """
    new_entry = {
        "name": self["name"],
        "filesystem": env_chk(self.get('filesystem', None), fw_spec),
        "path": self.get("path", os.getcwd()),
    }
    calc_locs = list(fw_spec.get("calc_locs", []))
    calc_locs.append(new_entry)
    return FWAction(mod_spec=[{'_push_all': {'calc_locs': calc_locs}}])
def run_task(self, fw_spec):
    """Modify an INCAR file according to this task's parameters.

    Loads the INCAR given by ``input_filename`` (default ``"INCAR"``),
    applies ``incar_update`` (dict update), ``incar_multiply`` (per-key
    multiplication) and ``incar_dictmod`` (DictMod syntax) in that order --
    each resolved through ``env_chk`` -- and writes the result to
    ``output_filename`` (default ``"INCAR"``).
    """
    input_name = self.get("input_filename", "INCAR")
    output_name = self.get("output_filename", "INCAR")
    incar = Incar.from_file(input_name)

    # process FireWork env values via env_chk
    incar_update = env_chk(self.get('incar_update'), fw_spec)
    incar_multiply = env_chk(self.get('incar_multiply'), fw_spec)
    incar_dictmod = env_chk(self.get('incar_dictmod'), fw_spec)

    # Plain overrides first ...
    if incar_update:
        incar.update(incar_update)
    # ... then multiplicative scaling of existing keys ...
    if incar_multiply:
        for k in incar_multiply:
            incar[k] = incar[k] * incar_multiply[k]
    # ... and finally DictMod-style structured modifications.
    if incar_dictmod:
        apply_mod(incar_dictmod, incar)

    incar.write_file(output_name)