# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# NOTE(review): the enclosing `def` (and the `if` this `else:` pairs with) are
# outside this view and extraction has flattened all indentation; the comments
# below annotate intent only — the code itself is untouched.
else:
# Convert the pydantic result model to a plain dict so harness metadata can be merged in.
output_fusion = output_data.dict()
# Do not override stdout/stderr if the program itself generated them;
# otherwise fall back to the harness-captured streams.
output_fusion["stdout"] = output_fusion.get("stdout", None) or metadata["stdout"]
output_fusion["stderr"] = output_fusion.get("stderr", None) or metadata["stderr"]
# An execution-level failure overrides whatever success the program reported.
if metadata["success"] is not True:
output_fusion["success"] = False
output_fusion["error"] = {"error_type": metadata["error_type"], "error_message": metadata["error_message"]}
# Raise an error if one exists and a user requested a raise
if raise_error and (output_fusion["success"] is not True):
# Log captured streams before raising so the failure context is preserved.
msg = "stdout:\n{}".format(output_fusion["stdout"])
msg += "\nstderr:\n{}".format(output_fusion["stderr"])
LOGGER.info(msg)
raise ValueError(output_fusion["error"]["error_message"])
# Fill out provenance data: merge harness-side augments into the result's provenance.
provenance_augments = get_provenance_augments()
provenance_augments["wall_time"] = metadata["wall_time"]
if "provenance" in output_fusion:
output_fusion["provenance"].update(provenance_augments)
else:
# Add onto the augments with some missing info
provenance_augments["creator"] = "QCEngine"
provenance_augments["version"] = provenance_augments["qcengine_version"]
output_fusion["provenance"] = provenance_augments
# Record the retry count only when retries actually happened.
if metadata["retries"] != 0:
output_fusion["provenance"]["retries"] = metadata["retries"]
# NOTE(review): fragment of a @contextmanager-style temporary-directory helper;
# the `def` line is outside this view and indentation has been flattened.
# Creates `parent/child`, yields it, and removes it on exit unless kept.
parent = Path(parent)
tmpdir = parent / child
try:
os.mkdir(tmpdir)
except FileExistsError:
# Mirror os.makedirs(exist_ok=...) semantics for a single directory:
# an existing directory is only an error when exist_ok is False.
if exist_ok:
pass
else:
raise
try:
yield tmpdir
finally:
# Unless the caller asked to keep scratch files ("messy"), clean up the tree.
if not messy:
shutil.rmtree(tmpdir)
LOGGER.info(f"... Removing {tmpdir}")
# NOTE(review): fragment of a scratch-file context manager, mid-loop: write each
# supplied input file into the local working directory (lwd), then yield so the
# caller can run and the finally-block below can collect outputs.
# Binary-listed files are written as bytes; everything else as text.
omode = "wb" if fl in as_binary else "w"
filename = lwd / fl
with open(filename, omode) as fp:
fp.write(content)
# NOTE(review): "(unknown)" looks like extraction damage — presumably this
# f-string originally interpolated the filename; confirm against upstream.
LOGGER.info(f"... Writing ({omode}): (unknown)")
yield outfiles
finally:
# Collect every requested output file after the managed block finishes,
# even if the body raised.
for fl in outfiles.keys():
omode = "rb" if fl in as_binary else "r"
try:
filename = lwd / fl
with open(filename, omode) as fp:
outfiles[fl] = fp.read()
# NOTE(review): message says "Writing" but this path is *reading*; and
# "(unknown)" looks like extraction damage to an interpolated filename.
LOGGER.info(f"... Writing ({omode}): (unknown)")
# NOTE(review): FileNotFoundError is a subclass of OSError, so listing it is redundant.
except (OSError, FileNotFoundError):
# A key containing "*" is a glob pattern: collect all matches by name.
if "*" in fl:
gfls = {}
for gfl in lwd.glob(fl):
with open(gfl, omode) as fp:
gfls[gfl.name] = fp.read()
LOGGER.info(f"... Writing ({omode}): {gfl}")
# Normalize "no matches" to None rather than an empty dict.
if not gfls:
gfls = None
outfiles[fl] = gfls
else:
# Missing non-glob file: record None so the caller sees the absence.
outfiles[fl] = None
# NOTE(review): this finally-block is a verbatim duplicate of the one that
# precedes it — almost certainly an extraction/merge artifact rather than
# intentional code. Confirm against upstream and remove one copy there.
finally:
# Collect every requested output file after the managed block finishes.
for fl in outfiles.keys():
omode = "rb" if fl in as_binary else "r"
try:
filename = lwd / fl
with open(filename, omode) as fp:
outfiles[fl] = fp.read()
# NOTE(review): message says "Writing" on a read path; "(unknown)" looks
# like extraction damage to an interpolated filename.
LOGGER.info(f"... Writing ({omode}): (unknown)")
except (OSError, FileNotFoundError):
# A key containing "*" is a glob pattern: collect all matches by name.
if "*" in fl:
gfls = {}
for gfl in lwd.glob(fl):
with open(gfl, omode) as fp:
gfls[gfl.name] = fp.read()
LOGGER.info(f"... Writing ({omode}): {gfl}")
# Normalize "no matches" to None rather than an empty dict.
if not gfls:
gfls = None
outfiles[fl] = gfls
else:
# Missing non-glob file: record None so the caller sees the absence.
outfiles[fl] = None
# NOTE(review): fragment of a subprocess launcher; the enclosing `def` is
# outside this view. Prepares popen kwargs, launches the process, and sets up
# buffered draining of its stdout/stderr.
# Resolve the executable relative to an optional binary prefix directory.
args[0] = os.path.join(bin_prefix, args[0])
if sys.platform.startswith("win"):
# Allow using CTRL_C_EVENT / CTRL_BREAK_EVENT
popen_kwargs["creationflags"] = subprocess.CREATE_NEW_PROCESS_GROUP
# Route the standard error and output
popen_kwargs["stdout"] = subprocess.PIPE
popen_kwargs["stderr"] = subprocess.PIPE
# Prepare BytesIO objects to store the raw stdout and stderr bytes
stdout = io.BytesIO()
stderr = io.BytesIO()
# Launch the process
# NOTE(review): this passes %-style lazy args with a message containing no
# placeholders — logging will fail to merge them and the args are effectively
# dropped; likely intended LOGGER.info("Popen %s %s", args, popen_kwargs).
LOGGER.info("Popen", args, popen_kwargs)
# Ready the output
ret = {"proc": subprocess.Popen(args, **popen_kwargs)}
# Spawn threads that will read from the stderr/stdout
# The PIPE uses a buffer with finite capacity. The underlying
# process will stall if it is unable to write to the buffer
# because the buffer is full. These threads continuously read
# from the buffers to ensure that they do not fill.
#
def read_from_buffer(buffer: BinaryIO, storage: io.BytesIO, sysio: TextIO):
    """Drain *buffer* into *storage* in 1 KiB chunks until EOF.

    When the enclosing scope's ``pass_output_forward`` flag is truthy, each
    chunk is also decoded and echoed to *sysio* as it arrives, so callers can
    watch the subprocess output live while it is being captured.
    """
    chunk = buffer.read(1024)
    while chunk != b"":
        storage.write(chunk)
        if pass_output_forward:
            sysio.write(chunk.decode())
        chunk = buffer.read(1024)