How to use the strax.endtime function in strax

To help you get started, we’ve selected a few strax.endtime examples based on popular ways it is used in public projects.

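All of the examples below revolve around the same idea: strax.endtime returns the exclusive end time of interval-like rows (records, hits, peaks, chunk data). A minimal sketch of what it computes, assuming the standard strax interval fields time, length and dt (strax.interval_dtype) and no precomputed endtime column:

import numpy as np
import strax

# Two toy intervals: start time [ns], length in samples, sample width [ns]
intervals = np.zeros(2, dtype=strax.interval_dtype)
intervals['time'] = [0, 100]
intervals['length'] = [10, 5]
intervals['dt'] = [2, 2]

# Exclusive end time of each interval: time + length * dt
# (arrays that carry an explicit 'endtime' field return that field instead)
print(strax.endtime(intervals))  # [ 20 110]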

Example from AxFoundation/strax: tests/test_overlap_plugin.py
        def get_window_size(self):
            # Window-size hook of the OverlapWindowPlugin subclass defined in
            # this test; `window` is presumably set in the surrounding test setup.
            return window

        def compute(self, peaks):
            result = dict(
                n_within_window=count_in_window(strax.endtime(peaks)))
            return result

        def iter(self, *args, **kwargs):
            yield from super().iter(*args, **kwargs)

    st = strax.Context(storage=[])
    st.register(Peaks)
    st.register(WithinWindow)

    result = st.get_array(run_id='some_run', targets='within_window')
    expected = count_in_window(strax.endtime(input_peaks))

    assert len(expected) == len(input_peaks), "WTF??"
    assert isinstance(result, np.ndarray), "Did not get an array"
    assert len(result) == len(expected), "Result has wrong length"
    np.testing.assert_equal(result['n_within_window'], expected,
                            "Counting went wrong")

Example from AxFoundation/strax: tests/test_helpers.py
def test_dsi(intvs):
    bs = list(zip(intvs['time'].tolist(), strax.endtime(intvs).tolist()))
    assert is_sorted(bs)
    assert is_disjoint(bs)
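
is_sorted and is_disjoint are small test helpers that the excerpt does not show. A sketch of what they might look like, given the (start, exclusive end) tuples built above:

def is_sorted(intervals):
    # Hypothetical helper: the (start, end) tuples appear in ascending order
    return intervals == sorted(intervals)

def is_disjoint(intervals):
    # Hypothetical helper: each interval ends before the next one starts;
    # strax end times are exclusive, so touching intervals (end == next start)
    # still count as disjoint.
    return all(a[1] <= b[0] for a, b in zip(intervals[:-1], intervals[1:]))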

Example from AxFoundation/strax: strax/processing/general.py
def touching_windows(things, containers, window=0):
    """Return array of (start, exclusive end) indices into things which extend
    to within window of the container, for each container in containers.

    :param things: Sorted array of interval-like data
    :param containers: Sorted array of interval-like data
    :param window: threshold distance for touching check
    For example:
       - window = 0: things must overlap one sample
       - window = -1: things can start right after container ends
         (i.e. container endtime equals the thing starttime, since strax
          endtimes are exclusive)
    """
    return _touching_windows(
        things['time'], strax.endtime(things),
        containers['time'], strax.endtime(containers),
        window=window)
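
A small, illustrative call (interval values invented for the sketch); each row of the result is a [left, right) slice into things for the corresponding container:

import numpy as np
import strax

dtype = [('time', np.int64), ('length', np.int32), ('dt', np.int16)]

things = np.zeros(3, dtype=dtype)
things['time'] = [0, 10, 30]      # intervals [0, 5), [10, 15), [30, 35)
things['length'] = 5
things['dt'] = 1

containers = np.zeros(1, dtype=dtype)
containers['time'] = 8            # interval [8, 18)
containers['length'] = 10
containers['dt'] = 1

# Only things[1] overlaps the container, so the slice is [1, 2)
print(strax.touching_windows(things, containers, window=0))  # [[1 2]]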

Example from AxFoundation/strax: strax/processing/general.py
def fully_contained_in(things, containers):
    """Return array of len(things) with index of interval in containers
    for which things are fully contained in a container, or -1 if no such
    exists.
    We assume all intervals are sorted by time, and b_intervals
    nonoverlapping.
    """
    result = np.ones(len(things), dtype=np.int32) * -1
    a_starts = things['time']
    b_starts = containers['time']
    a_ends = strax.endtime(things)
    b_ends = strax.endtime(containers)
    _fc_in(a_starts, b_starts, a_ends, b_ends, result)
    return result
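
Another invented example, showing the return convention (one container index, or -1, per thing):

import numpy as np
import strax

dtype = [('time', np.int64), ('length', np.int32), ('dt', np.int16)]

things = np.zeros(3, dtype=dtype)
things['time'] = [1, 6, 20]       # intervals [1, 3), [6, 16), [20, 23)
things['length'] = [2, 10, 3]
things['dt'] = 1

containers = np.zeros(2, dtype=dtype)
containers['time'] = [0, 19]      # intervals [0, 5), [19, 29)
containers['length'] = [5, 10]
containers['dt'] = 1

# things[0] fits in containers[0], things[1] fits nowhere,
# things[2] fits in containers[1]
print(strax.fully_contained_in(things, containers))  # [ 0 -1  1]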

Example from AxFoundation/strax: strax/xenon/plugins.py
"""
        # Mock up a "hits" array so we can just use the existing peakfinder
        # It doesn't work on raw peaks, since they might have different dts
        # TODO: is there no cleaner way?
        fake_hits = np.zeros(len(peaks), dtype=strax.hit_dtype)
        fake_hits['dt'] = 1
        fake_hits['time'] = peaks['time']
        # TODO: could this cause int overrun nonsense anywhere?
        fake_hits['length'] = peaks['endtime'] - peaks['time']
        fake_peaks = strax.find_peaks(
            fake_hits, to_pe=np.zeros(1),
            gap_threshold=gap_threshold,
            left_extension=left_extension, right_extension=right_extension,
            min_hits=1, min_area=0,
            max_duration=max_duration)
        return fake_peaks['time'], strax.endtime(fake_peaks)

Example from AxFoundation/strax: strax/processing/general.py
def _find_break_i(data, safe_break, not_before):
    """Return first index of element right of the first gap
    larger than safe_break in data.

    Assumes all x have the same length and are sorted!

    :param tolerant: if no break found, yield an as good as possible break
    anyway.
    """
    assert len(data) >= 2
    latest_end_seen = max(not_before, strax.endtime(data[0]))
    for i, d in enumerate(data):
        if i == 0:
            continue
        if d['time'] >= latest_end_seen + safe_break:
            return i
        latest_end_seen = max(latest_end_seen,
                              strax.endtime(d))
    raise NoBreakFound
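
To make the loop concrete, here is a toy plain-numpy illustration of the same gap test (values invented; the real helper above raises NoBreakFound when no gap is large enough):

import numpy as np

# Sorted interval start and exclusive end times [ns]
starts = np.array([0, 10, 100])
ends = np.array([5, 15, 105])
safe_break = 50

# Gap in front of element i = start[i] minus the largest end time seen so far
latest_end = np.maximum.accumulate(ends)
gaps = starts[1:] - latest_end[:-1]            # [5, 85]
break_i = 1 + int(np.argmax(gaps >= safe_break))
print(break_i)                                 # 2: first element right of a big gap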

Example from AxFoundation/strax: strax/processing/peak_building.py
"""
    # Mock up a "hits" array so we can just use the existing peakfinder
    # It doesn't work on raw peaks, since they might have different dts
    # TODO: is there no cleaner way?
    fake_hits = np.zeros(len(peaks), dtype=strax.hit_dtype)
    fake_hits['dt'] = 1
    fake_hits['area'] = 1
    fake_hits['time'] = peaks['time']
    # TODO: could this cause int overrun nonsense anywhere?
    fake_hits['length'] = strax.endtime(peaks) - peaks['time']
    fake_peaks = strax.find_peaks(
        fake_hits, adc_to_pe=np.ones(1),
        gap_threshold=gap_threshold,
        left_extension=left_extension, right_extension=right_extension,
        min_channels=1, min_area=0)
    return fake_peaks['time'], strax.endtime(fake_peaks)
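
The trick in both snippets above is that a fake hit with dt = 1 ns and length = endtime - time spans exactly the original peak's time range, so the rebuilt peaks keep the right boundaries. A quick illustrative check (values invented):

import numpy as np
import strax

# One fake 1 ns/sample hit covering a peak that ran from 1000 to 1500 ns
fake = np.zeros(1, dtype=strax.hit_dtype)
fake['dt'] = 1
fake['time'] = 1000
fake['length'] = 1500 - 1000      # original endtime minus original time

assert strax.endtime(fake)[0] == 1500   # end time is reproduced exactly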

Example from AxFoundation/strax: strax/storage.py
    def save(self, data: np.ndarray, chunk_i: int):
        if self.closed:
            raise RuntimeError(f"{self.key.data_type} saver already closed!")

        fn = '%06d' % chunk_i
        chunk_info = dict(chunk_i=chunk_i,
                          filename=fn,
                          n=len(data),
                          nbytes=data.nbytes)
        if 'time' in data[0].dtype.names:
            for desc, i in (('first', 0), ('last', -1)):
                chunk_info[f'{desc}_time'] = int(data[i]['time'])
                chunk_info[f'{desc}_endtime'] = int(strax.endtime(data[i]))

        if not self.meta_only:
            chunk_info['filesize'] = strax.save_file(
                filename=os.path.join(self.tempdirname, fn),
                data=data,
                compressor=self.md['compressor'])
        with open(f'{self.tempdirname}/metadata_{chunk_i:06d}.json',
                  mode='w') as f:
            f.write(json.dumps(chunk_info, **self.json_options))
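
For orientation, the per-chunk metadata file written here might end up looking roughly like the following (all values invented for the sketch):

# Hypothetical contents of metadata_000000.json for one saved chunk
chunk_info = {
    "chunk_i": 0,
    "filename": "000000",
    "n": 12345,                   # rows in the chunk
    "nbytes": 3456789,            # uncompressed size in memory
    "first_time": 1000000000,     # 'time' of the first row [ns]
    "first_endtime": 1000000100,  # strax.endtime of the first row [ns]
    "last_time": 1999999000,
    "last_endtime": 1999999900,
    "filesize": 876543,           # compressed size on disk
}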

Example from AxFoundation/strax: strax/storage/common.py
        if self.closed:
            raise RuntimeError(f"Attempt to save to {self.md} saver, "
                               f"which is already closed!")

        chunk_info = dict(chunk_i=chunk_i,
                          n=len(chunk),
                          start=chunk.start,
                          end=chunk.end,
                          run_id=chunk.run_id,
                          nbytes=chunk.nbytes)
        if len(chunk) != 0 and 'time' in chunk.dtype.names:
            for desc, i in (('first', 0), ('last', -1)):
                chunk_info[f'{desc}_time'] = \
                    int(chunk.data[i]['time'])
                chunk_info[f'{desc}_endtime'] = \
                    int(strax.endtime(chunk.data[i]))

        if len(chunk):
            bonus_info, future = self._save_chunk(
                chunk.data,
                chunk_info,
                executor=None if self.is_forked else executor)
            chunk_info.update(bonus_info)
        else:
            # No need to create an empty file for an empty chunk;
            # the annotation in the metadata is sufficient.
            future = None

        self._save_chunk_metadata(chunk_info)
        return future