# NOTE: scraped advertising banner from the code-hosting page, kept as a
# comment so the file stays valid Python:
# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def load_from_readers(input_dir, erase=True, compressor='blosc'):
    """Return concatenated & sorted records from multiple reader data files

    :param input_dir: Directory containing the ``reader_*`` data files.
    :param erase: If True (default), remove ``input_dir`` after loading.
    :param compressor: Compression scheme the files were written with.
        Default ``'blosc'`` matches the value hard-coded in ``_load_chunk``.
    :raises FileNotFoundError: If no ``reader_*`` files are present.

    Bug fix: ``compressor`` was previously a free (undefined) variable;
    it is now an explicit, backward-compatible keyword parameter.
    The load call uses ``strax.load_file`` for consistency with
    ``_load_chunk``, which passes the same argument shape.
    """
    files = glob.glob(f'{input_dir}/reader_*')
    if not files:
        # Fail with a clear message instead of np.concatenate's
        # cryptic "need at least one array" ValueError.
        raise FileNotFoundError(f'No reader_* files found in {input_dir}')
    records = [strax.load_file(fn,
                               compressor=compressor,
                               dtype=strax.record_dtype())
               for fn in files]
    records = np.concatenate(records)
    records = strax.sort_by_time(records)
    if erase:
        shutil.rmtree(input_dir)
    return records
def compute(self):
    """Load the raw records for this run and return them in canonical
    strax form: sorted by time, baselined, and integrated.

    ``strax.baseline`` and ``strax.integrate`` modify the record array
    in place, so the same array is returned after processing.
    """
    rr = strax.load(self.run_name)
    rr = strax.sort_by_time(rr)
    # In-place processing: no reassignment needed for these two steps.
    strax.baseline(rr)
    strax.integrate(rr)
    return rr
def _load_chunk(self, path, kind='central'):
    """Read every ``reader_*`` file under ``path`` into one time-sorted
    record array.

    :param path: Directory holding the per-reader data files.
    :param kind: ``'central'`` returns all records; ``'pre'``/``'post'``
        trim the array at a safe break in the pulse stream
        (``config['safe_break_in_pulses']``), keeping the left side for
        ``'post'`` chunks.
    :return: numpy record array of strax records.

    If ``config['erase']`` is set, the source directory is deleted
    after loading.
    """
    parts = []
    for fn in glob.glob(f'{path}/reader_*'):
        parts.append(strax.load_file(fn,
                                     compressor='blosc',
                                     dtype=strax.record_dtype()))
    records = strax.sort_by_time(np.concatenate(parts))
    if kind != 'central':
        # Edge chunk: cut at a quiet moment so pulses are not split
        # across chunk boundaries.
        records = strax.from_break(
            records,
            safe_break=self.config['safe_break_in_pulses'],
            left=(kind == 'post'),
            tolerant=True)
    if self.config['erase']:
        shutil.rmtree(path)
    return records
def finish_results():
nonlocal results
records = np.concatenate(results)
# In strax data, records are always stored
# sorted, baselined and integrated
records = strax.sort_by_time(records)
strax.baseline(records)
strax.integrate(records)
results = []
return records
is_split=is_split,
orig_dt=records[0]['dt'],
min_area=min_area,
args_options=tuple(args_options),
result_dtype=peaks.dtype)
if is_split.sum() != 0:
# Found new peaks: compute basic properties
strax.sum_waveform(new_peaks, records, to_pe)
strax.compute_widths(new_peaks)
# ... and recurse (if needed)
new_peaks = self(new_peaks, records, to_pe,
do_iterations=do_iterations - 1,
min_area=min_area, **kwargs)
peaks = strax.sort_by_time(np.concatenate([peaks[~is_split],
new_peaks]))
return peaks