How to use the cwltool.loghandler._logger.info function in cwltool

To help you get started, we’ve selected a few cwltool examples based on popular ways this function is used in public projects.

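Every example below calls the same object: _logger, the module-level logger that cwltool/loghandler.py creates with Python's standard logging module. A minimal sketch of the shared calling pattern, assuming cwltool is installed; the message and arguments are illustrative placeholders, not output from a real run.

import logging

from cwltool.loghandler import _logger

# _logger is an ordinary logging.Logger named "cwltool"; make sure INFO
# records are not filtered out (cwltool's own CLI configures this for you).
_logger.setLevel(logging.INFO)

# The format string and its values are passed separately and %-formatted
# lazily, exactly as in the project snippets that follow.
_logger.info("[job %s] completed %s", "example-job", "success")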

github common-workflow-language / cwltool / cwltool / checker.py
    def _rec_fields(rec):
        out = {}
        for field in rec["fields"]:
            name = shortname(field["name"])
            out[name] = field["type"]
        return out

    srcfields = _rec_fields(src)
    sinkfields = _rec_fields(sink)
    for key in sinkfields.keys():
        if (
            not can_assign_src_to_sink(
                srcfields.get(key, "null"), sinkfields.get(key, "null"), strict
            )
            and sinkfields.get(key) is not None
        ):
            _logger.info(
                "Record comparison failure for %s and %s\n"
                "Did not match fields for %s: %s and %s",
                src["name"],
                sink["name"],
                key,
                srcfields.get(key),
                sinkfields.get(key),
            )
            return False
    return True
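
One detail worth copying from this snippet: the format string and its values are passed to _logger.info separately, so interpolation only happens when the record is actually emitted. A small self-contained comparison; the record names are placeholders.

from cwltool.loghandler import _logger

src_name, sink_name = "step_a/record_out", "step_b/record_in"  # placeholder names

# Eager formatting: builds the message even when INFO is filtered out.
_logger.info("Record comparison failure for %s and %s" % (src_name, sink_name))

# Lazy formatting, as checker.py does it: the logging machinery interpolates
# the arguments only if the record is emitted.
_logger.info("Record comparison failure for %s and %s", src_name, sink_name)
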
github common-workflow-language / cwltool / cwltool / provenance.py
    def close(self, save_to=None):
        """
        After calling this method, this ResearchObject instance can no longer
        be used, except for no-op calls to .close().

        The 'saveTo' folder should not exist - if it does, it will be deleted.

        It is safe to call this function multiple times without the
        'saveTo' argument, e.g. within a try..finally block to
        ensure the temporary files of this Research Object are removed.
        """
        if save_to is None:
            if not self.closed:
                _logger.debug("[provenance] Deleting temporary %s", self.folder)
                shutil.rmtree(self.folder, ignore_errors=True)
        else:
            save_to = os.path.abspath(save_to)
            _logger.info("[provenance] Finalizing Research Object")
            self._finalize()  # write manifest etc.
            # TODO: Write as archive (.zip or .tar) based on extension?

            if os.path.isdir(save_to):
                _logger.info("[provenance] Deleting existing %s", save_to)
                shutil.rmtree(save_to)
            shutil.move(self.folder, save_to)
            _logger.info("[provenance] Research Object saved to %s", save_to)
            self.folder = save_to
        self.closed = True
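
These "[provenance]" messages are INFO-level records on the "cwltool" logger, so when cwltool is driven as a library rather than from its CLI they stay invisible unless that logger emits INFO. A minimal setup sketch:

import logging

# "cwltool" is the logger name behind _logger in cwltool/loghandler.py.
cwl_logger = logging.getLogger("cwltool")
cwl_logger.setLevel(logging.INFO)
if not cwl_logger.handlers:  # avoid duplicating cwltool's default handler
    cwl_logger.addHandler(logging.StreamHandler())
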
github common-workflow-language / cwltool / cwltool / singularity.py
            candidates.append(_normalize_image_id(dockerRequirement["dockerImageId"]))
            if is_version_3_or_newer():
                candidates.append(_normalize_sif_id(dockerRequirement["dockerPull"]))

        targets = [os.getcwd()]
        if "CWL_SINGULARITY_CACHE" in os.environ:
            targets.append(os.environ["CWL_SINGULARITY_CACHE"])
        if is_version_2_6() and "SINGULARITY_PULLFOLDER" in os.environ:
            targets.append(os.environ["SINGULARITY_PULLFOLDER"])
        for target in targets:
            for dirpath, subdirs, files in os.walk(target):
                for entry in files:
                    if entry in candidates:
                        path = os.path.join(dirpath, entry)
                        if os.path.isfile(path):
                            _logger.info(
                                "Using local copy of Singularity image found in %s",
                                dirpath,
                            )
                            dockerRequirement["dockerImageId"] = path
                            found = True
        if (force_pull or not found) and pull_image:
            cmd = []  # type: List[str]
            if "dockerPull" in dockerRequirement:
                if cache_folder:
                    env = os.environ.copy()
                    if is_version_2_6():
                        env["SINGULARITY_PULLFOLDER"] = cache_folder
                        cmd = [
                            "singularity",
                            "pull",
                            "--force",
github common-workflow-language / cwltool / cwltool / sandboxjs.py
    fn = code_fragment_to_js(js, jslib)

    returncode, stdout, stderr = exec_js_process(
        fn, timeout, js_console=js_console, force_docker_pull=force_docker_pull
    )

    if js_console:
        if stderr is not None:
            _logger.info("Javascript console output:")
            _logger.info("----------------------------------------")
            _logger.info(
                "\n".join(
                    re.findall(r"^[[](?:log|err)[]].*$", stderr, flags=re.MULTILINE)
                )
            )
            _logger.info("----------------------------------------")

    def stdfmt(data):  # type: (str) -> str
        if "\n" in data:
            return "\n" + data.strip()
        return data

    def fn_linenum():  # type: () -> str
        lines = fn.splitlines()
        ofs = 0
        maxlines = 99
        if len(lines) > maxlines:
            ofs = len(lines) - maxlines
            lines = lines[-maxlines:]
        return "\n".join("%02i %s" % (i + ofs + 1, b) for i, b in enumerate(lines))

    if returncode != 0:
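
When js_console is enabled (cwltool's --js-console option), the block above filters the expression engine's stderr with a regular expression so that only lines tagged [log] or [err] reach the log. A standalone check of that pattern, with made-up stderr:

import re

sample_stderr = "[log] evaluating inputs\ninternal debug noise\n[err] ReferenceError: x is not defined"

# Same pattern as in sandboxjs.py: keep only the tagged console lines.
console_lines = re.findall(r"^[[](?:log|err)[]].*$", sample_stderr, flags=re.MULTILINE)
print("\n".join(console_lines))
# [log] evaluating inputs
# [err] ReferenceError: x is not defined
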
github common-workflow-language / cwltool / cwltool / docker.py
if "dockerPull" in docker_requirement:
                cmd = ["docker", "pull", str(docker_requirement["dockerPull"])]
                _logger.info(str(cmd))
                subprocess.check_call(cmd, stdout=sys.stderr)  # nosec
                found = True
            elif "dockerFile" in docker_requirement:
                dockerfile_dir = str(tempfile.mkdtemp(prefix=tmp_outdir_prefix))
                with open(os.path.join(dockerfile_dir, "Dockerfile"), "wb") as dfile:
                    dfile.write(docker_requirement["dockerFile"].encode("utf-8"))
                cmd = [
                    "docker",
                    "build",
                    "--tag=%s" % str(docker_requirement["dockerImageId"]),
                    dockerfile_dir,
                ]
                _logger.info(str(cmd))
                subprocess.check_call(cmd, stdout=sys.stderr)  # nosec
                found = True
            elif "dockerLoad" in docker_requirement:
                cmd = ["docker", "load"]
                _logger.info(str(cmd))
                if os.path.exists(docker_requirement["dockerLoad"]):
                    _logger.info(
                        "Loading docker image from %s",
                        docker_requirement["dockerLoad"],
                    )
                    with open(docker_requirement["dockerLoad"], "rb") as dload:
                        loadproc = subprocess.Popen(  # nosec
                            cmd, stdin=dload, stdout=sys.stderr
                        )
                else:
                    loadproc = subprocess.Popen(  # nosec
github common-workflow-language / cwltool / cwltool / main.py
                return 0

            if args.print_rdf:
                stdout.write(
                    printrdf(tool, loadingContext.loader.ctx, args.rdf_serializer)
                )
                return 0

            if args.print_dot:
                printdot(tool, loadingContext.loader.ctx, stdout)
                return 0

            if args.print_targets:
                for f in ("outputs", "steps", "inputs"):
                    if tool.tool[f]:
                        _logger.info("%s%s targets:", f[0].upper(), f[1:-1])
                        stdout.write(
                            "  "
                            + "\n  ".join([shortname(t["id"]) for t in tool.tool[f]])
                            + "\n"
                        )
                return 0

            if args.target:
                ctool = choose_target(args, tool, loadingContext)
                if ctool is None:
                    return 1
                else:
                    tool = ctool

            if args.print_subgraph:
                if "name" in tool.tool:
github common-workflow-language / cwltool / cwltool / process.py
                    plain_hint = dict(
                        (key, r[key])
                        for key in r
                        if key not in self.doc_loader.identifiers
                    )  # strip identifiers
                    validate_ex(
                        cast(
                            Schema,
                            avsc_names.get_name(cast(str, plain_hint["class"]), None),
                        ),
                        plain_hint,
                        strict=strict,
                    )
                elif r["class"] in ("NetworkAccess", "LoadListingRequirement"):
                    pass
                else:
                    _logger.info(str(sl.makeError("Unknown hint %s" % (r["class"]))))
github common-workflow-language / cwltool / cwltool / command_line_tool.py
            # get the shared lock to ensure no other process is trying
            # to write to this cache
            shared_file_lock(jobcachelock)
            jobcachelock.seek(0)
            jobstatus = jobcachelock.read()

            if os.path.isdir(jobcache) and jobstatus == "success":
                if docker_req and runtimeContext.use_container:
                    cachebuilder.outdir = (
                        runtimeContext.docker_outdir or random_outdir()
                    )
                else:
                    cachebuilder.outdir = jobcache

                _logger.info("[job %s] Using cached output in %s", jobname, jobcache)
                yield CallbackJob(self, output_callbacks, cachebuilder, jobcache)
                # we're done with the cache so release lock
                jobcachelock.close()
                return
            else:
                _logger.info(
                    "[job %s] Output of job will be cached in %s", jobname, jobcache
                )

                # turn shared lock into an exclusive lock since we'll
                # be writing the cache directory
                upgrade_lock(jobcachelock)

                shutil.rmtree(jobcache, True)
                os.makedirs(jobcache)
                runtimeContext = runtimeContext.copy()
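
This cache path only runs when a job cache is configured via cwltool's --cachedir option. A hedged sketch with placeholder file names: the first run logs "Output of job will be cached in ...", and an unchanged rerun hits the branch above and logs "[job ...] Using cached output in ..." instead of re-executing the tool.

import subprocess

# --cachedir enables the job cache read by the snippet above;
# workflow.cwl and job.yml are placeholders.
cmd = ["cwltool", "--cachedir", "/tmp/cwl-cache", "workflow.cwl", "job.yml"]

subprocess.run(cmd, check=True)  # first run populates the cache
subprocess.run(cmd, check=True)  # second run reuses the cached output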