How to use the pycbc.workflow.core.File class in PyCBC

To help you get started, we’ve selected a few PyCBC examples based on popular ways the File class is used in public projects.

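Before the examples, here is a minimal sketch of constructing a File directly. The detector, description, GPS times, and output directory are placeholders, and the segment object is assumed to come from ligo.segments; the keyword arguments mirror the calls in the excerpts below.

from ligo import segments
from pycbc.workflow.core import File

# Placeholder analysis span; a real workflow takes this from the workflow object.
analysis_time = segments.segment(1126051217, 1126137617)

# Positional arguments: the ifo(s) the file covers, a description, and the
# segment of time it spans. extension/directory/tags control the file name.
out_file = File('H1', 'EXAMPLE_RESULTS', analysis_time,
                extension='html', directory='output', tags=['DEMO'])

# Where the workflow expects the file to live on disk.
print(out_file.storage_path)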

Example from gwastro/pycbc, pycbc/workflow/core.py:
def get_command_line(self):
        self._finalize()
        arglist = self._dax_node.arguments

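        # Plain string arguments may contain several options, so split them on
        # spaces; File objects are kept whole.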
        tmpargs = []
        for a in arglist:
            if not isinstance(a, File):
                tmpargs += a.split(' ')
            else:
                tmpargs.append(a)
        arglist = tmpargs

        arglist = [a for a in arglist if a != '']

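        # Substitute each File object with its path on disk.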
        arglist = [a.storage_path if isinstance(a, File) else a for a in arglist]

        # This allows the pfn to be an http(s) URL, which will be
        # downloaded by resolve_url
        exe_path = urllib.parse.urlsplit(self.executable.get_pfn()).path

        return [exe_path] + arglist
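
The point of this method is that File objects can be passed around in a node's argument list and are only turned into concrete paths (via storage_path) when the command line is assembled, so the workflow machinery can keep tracking them as data dependencies up to that point.
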
Example from gwastro/pycbc, pycbc/workflow/core.py:
detectors. Files will be supplied as --opt ifo1:input1 ifo2:input2
            .....
            File names are created internally from the provided extension and
            analysis time.
        """
        all_tags = copy.deepcopy(self.executable.tags)
        for tag in tags:
            if tag not in all_tags:
                all_tags.append(tag)

        output_files = FileList([])
        store_file = store_file if store_file is not None \
                                              else self.executable.retain_files

        for ifo in ifos:
            curr_file = File(ifo, self.executable.name, analysis_time,
                             extension=extension, store_file=store_file,
                             directory=self.executable.out_dir, tags=all_tags,
                             use_tmp_subdirs=use_tmp_subdirs)
            output_files.append(curr_file)
        self.add_multiifo_output_list_opt(opt, output_files)
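
The excerpt above creates one File per detector and collects them in a FileList. A stripped-down sketch of that pattern, with placeholder detector names, times, and tags, looks like this:

from ligo import segments
from pycbc.workflow.core import File, FileList

analysis_time = segments.segment(1126051217, 1126137617)  # placeholder span

output_files = FileList([])
for ifo in ['H1', 'L1']:  # placeholder detector list
    curr_file = File(ifo, 'EXAMPLE_OUTPUT', analysis_time,
                     extension='hdf', directory='output', tags=['DEMO'])
    output_files.append(curr_file)
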
Example from gwastro/pycbc, pycbc/workflow/segment.py:
workflow.cp, tags=tags):
            if execute_now:
                workflow.execute_node(cum_node)
            else:
                workflow.add_node(cum_node)
        else:
            cum_node.executed = True
            for fil in cum_node._outputs:
                fil.node = None
                fil.PFN(urljoin('file:', pathname2url(fil.storage_path)),
                        site='local')
        add_inputs += cum_node.output_files

    # add cumulative files for each ifo together
    name = '%s_VETO_SEGMENTS' %(segment_name)
    outfile = File(workflow.ifos, name, workflow.analysis_time,
                                            directory=out_dir, extension='xml',
                                            tags=[segment_name] + tags)
    add_job = LigolwAddExecutable(cp, 'llwadd', ifos=ifo, out_dir=out_dir,
                                  tags=tags)
    add_node = add_job.create_node(valid_segment, add_inputs, output=outfile)
    if file_needs_generating(add_node.output_files[0].cache_entry.path,
                             workflow.cp, tags=tags):
        if execute_now:
            workflow.execute_node(add_node)
        else:
            workflow.add_node(add_node)
    else:
        add_node.executed = True
        for fil in add_node._outputs:
            fil.node = None
            fil.PFN(urljoin('file:', pathname2url(fil.storage_path)),
                    site='local')
Example from gwastro/pycbc, pycbc/workflow/datafind.py:
for cache in datafindcache_list:
        # sort the cache into time sequential order
        cache.sort()
        curr_ifo = cache.ifo
        for frame in cache:
            # Pegasus doesn't like "localhost" in URLs.
            frame.url = frame.url.replace('file://localhost','file://')

            # Create one File() object for each unique frame file that we
            # get back in the cache.
            if prev_file:
                prev_name = os.path.basename(prev_file.cache_entry.url)
                this_name = os.path.basename(frame.url)

            if (prev_file is None) or (prev_name != this_name):
                currFile = File(curr_ifo, frame.description,
                    frame.segment, file_url=frame.url, use_tmp_subdirs=True)
                datafind_filelist.append(currFile)
                prev_file = currFile

            # Populate the PFNs for the File() we just created
            if frame.url.startswith('file://'):
                currFile.PFN(frame.url, site='local')
                if frame.url.startswith(
                    'file:///cvmfs/oasis.opensciencegrid.org/ligo/frames'):
                    # Datafind returned a URL valid on the osg as well
                    # so add the additional PFNs to allow OSG access.
                    currFile.PFN(frame.url, site='osg')
                    currFile.PFN(frame.url.replace(
                        'file:///cvmfs/oasis.opensciencegrid.org/',
                        'root://xrootd-local.unl.edu/user/'), site='osg')
                    currFile.PFN(frame.url.replace(
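
The datafind excerpt registers physical file names (PFNs) on each File so Pegasus knows where an existing frame file can actually be read from. A sketch of that pattern, with a placeholder frame path and GPS times:

from urllib.parse import urljoin
from urllib.request import pathname2url
from ligo import segments
from pycbc.workflow.core import File

frame_seg = segments.segment(1126051217, 1126055313)  # placeholder 4096 s frame
frame_url = urljoin('file:', pathname2url('/data/H-H1_EXAMPLE-1126051217-4096.gwf'))

frame_file = File('H1', 'H1_EXAMPLE', frame_seg, file_url=frame_url,
                  use_tmp_subdirs=True)
frame_file.PFN(frame_url, site='local')  # the file already exists at this URL
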
Example from gwastro/pycbc, pycbc/workflow/plotting.py:
def make_veto_table(workflow, out_dir, vetodef_file=None, tags=None):
    """ Creates a node in the workflow for writing the veto_definer
    table. Returns a File instances for the output file.
    """
    if vetodef_file is None:
        if not workflow.cp.has_option_tags("workflow-segments",
                                           "segments-veto-definer-file", []):
            return None
        vetodef_file = workflow.cp.get_opt_tags("workflow-segments",
                                           "segments-veto-definer-file", [])
        file_url = urljoin('file:', pathname2url(vetodef_file))
        vdf_file = File(workflow.ifos, 'VETO_DEFINER',
                        workflow.analysis_time, file_url=file_url)
        vdf_file.PFN(file_url, site='local')
    else:
        vdf_file = vetodef_file

    if tags is None: tags = []
    makedir(out_dir)
    node = PlotExecutable(workflow.cp, 'page_vetotable', ifos=workflow.ifos,
                    out_dir=out_dir, tags=tags).create_node()
    node.add_input_opt('--veto-definer-file', vdf_file)
    node.new_output_file_opt(workflow.analysis_time, '.html', '--output-file')
    workflow += node
    return node.output_files[0]
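
make_veto_table shows both sides of the File API in one function: an existing veto-definer file on disk is wrapped as a File (build a file: URL with urljoin and pathname2url, pass it as file_url, register a local PFN) and attached to the node with add_input_opt, while the HTML output is declared with new_output_file_opt so the node creates and registers the output File itself.
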
Example from gwastro/pycbc, pycbc/workflow/grb_utils.py:
setattr(row,entry,'')
            elif entry == 'process_id':
                row.process_id = ilwd.ilwdchar("external_trigger:process_id:0")
            elif entry == 'event_id':
                row.event_id = ilwd.ilwdchar("external_trigger:event_id:0")
            else:
                print("Column %s not recognized" %(entry), file=sys.stderr)
                raise ValueError

    # Save file
    xml_file_name = "triggerGRB%s.xml" % str(cp.get("workflow",
                                                    "trigger-name"))
    xml_file_path = os.path.join(out_dir, xml_file_name)
    utils.write_filename(xmldoc, xml_file_path)
    xml_file_url = urljoin("file:", pathname2url(xml_file_path))
    xml_file = File(ifos, xml_file_name, sci_seg, file_url=xml_file_url)
    xml_file.PFN(xml_file_url, site="local")

    return xml_file
Example from gwastro/pycbc, pycbc/workflow/summaryplots.py:
# query for the hardware injection segments
    get_hardware_injection_segment_files(workflow, output_dir, hwinjDefNewPath)

    # create node
    node = Node(hwinjpage_job)
    node.add_opt('--gps-start-time', workflow.analysis_time[0])
    node.add_opt('--gps-end-time', workflow.analysis_time[1])
    node.add_opt('--source-xml', hwinjDefNewPath)
    node.add_opt('--segment-dir', output_dir)
    node.add_opt('--cache-file', cache_filename)
    node.add_opt('--cache-pattern', inspiral_cachepattern)
    node.add_opt('--analyze-injections', '')
    for ifo in workflow.ifos:
        node.add_opt('--%s-injections'%ifo.lower(), '')
    outfile = File(node.executable.ifo_string, 'HWINJ_SUMMARY',
                workflow.analysis_time, extension='html', directory=output_dir)
    node.add_opt('--outfile', outfile.storage_path)

    # add node to workflow
    workflow.add_node(node)

    # make all input_files parents
    #for f in input_files:
    #    dep = dax.Dependency(parent=f.node._dax_node, child=node._dax_node)
    #    workflow._adag.addDependency(dep)

    out_files += node.output_files

    logging.info("Leaving hardware injection page setup.")

    return out_files
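
Here the output File is constructed by hand and its storage_path is passed to the executable with add_opt('--outfile', ...), in contrast to the plotting example above, where new_output_file_opt lets the node create and register the output File itself.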