def test_init(self):
    tstamps = np.arange(1.0, 100.0, 0.1, dtype=float)
    ts = TimeSeries("test_ts", list(range(len(tstamps))), 'unit', timestamps=tstamps)
    ept = TimeIntervals('epochs', "TimeIntervals unittest")
    self.assertEqual(ept.name, 'epochs')
    ept.add_interval(10.0, 20.0, ["test", "unittest", "pynwb"], ts)
    row = ept[0]
    self.assertEqual(row[1], 10.0)
    self.assertEqual(row[2], 20.0)
    self.assertEqual(row[3], ["test", "unittest", "pynwb"])
    self.assertEqual(row[4], [(90, 100, ts)])
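
# A minimal usage sketch, not taken from the test above: building an epochs table
# with TimeIntervals and viewing it through the DynamicTable API. The names are
# illustrative; to_dataframe() assumes pandas is available.
def _example_time_intervals_usage():
    from pynwb import TimeSeries
    from pynwb.epoch import TimeIntervals
    import numpy as np

    tstamps = np.arange(1.0, 100.0, 0.1, dtype=float)
    series = TimeSeries('example_ts', list(range(len(tstamps))), 'unit', timestamps=tstamps)
    epochs = TimeIntervals('epochs', 'example epochs table')
    # add_interval stores the interval bounds, the tags, and a reference into the
    # portion of the TimeSeries that falls inside the interval
    epochs.add_interval(start_time=10.0, stop_time=20.0,
                        tags=['example'], timeseries=series)
    return epochs.to_dataframe()  # one row per added interval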
def test_print_file(self):
    nwbfile = NWBFile(session_description='session_description',
                      identifier='identifier', session_start_time=datetime.now(tzlocal()))
    ts = TimeSeries('name', [1., 2., 3.] * 1000, timestamps=[1, 2, 3])
    ts2 = TimeSeries('name2', [1, 2, 3] * 1000, timestamps=[1, 2, 3])
    self.assertEqual(str(ts), """
name
Fields:
  comments: no comments
  conversion: 1.0
  data: [1. 2. 3. ... 1. 2. 3.]
  description: no description
  interval: 1
  resolution: 0.0
  timestamps: [1 2 3]
  timestamps_unit: Seconds
""")
    nwbfile.add_acquisition(ts)
    nwbfile.add_acquisition(ts2)
    nwbfile.add_epoch(start_time=1.0, stop_time=10.0, tags=['tag1', 'tag2'])
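
# A small round-trip sketch, illustrative rather than part of test_print_file:
# write the NWBFile built above to disk with NWBHDF5IO and read the acquisition
# back by name. The file path is a placeholder.
def _example_roundtrip(nwbfile, path='example.nwb'):
    from pynwb import NWBHDF5IO

    with NWBHDF5IO(path, 'w') as io:
        io.write(nwbfile)
    with NWBHDF5IO(path, 'r') as io:
        read_nwbfile = io.read()
        # acquisition is keyed by the TimeSeries name given at construction time
        return read_nwbfile.acquisition['name'].data[:]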
def test_init_rate(self):
    ts = TimeSeries('test_ts', list(), 'unit', starting_time=0.0, rate=1.0)
    self.assertEqual(ts.starting_time, 0.0)
    self.assertEqual(ts.rate, 1.0)
    self.assertEqual(ts.time_unit, "Seconds")
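
# An illustrative sketch of the two equivalent ways to describe regular sampling
# in a TimeSeries: an explicit timestamps array, or starting_time plus rate as in
# test_init_rate above. Names and values here are placeholders.
def _example_rate_vs_timestamps():
    from pynwb import TimeSeries
    import numpy as np

    data = list(range(10))
    ts_explicit = TimeSeries('ts_explicit', data, 'unit',
                             timestamps=np.arange(10.0))       # 10 timestamps at 1 Hz
    ts_regular = TimeSeries('ts_regular', data, 'unit',
                            starting_time=0.0, rate=1.0)       # timestamps left implicit
    return ts_explicit, ts_regular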
def test_write_dataset_custom_fillvalue(self):
    a = H5DataIO(np.arange(20).reshape(5, 4), fillvalue=-1)
    ts = TimeSeries('ts_name', a, 'A', timestamps=np.arange(5))
    self.nwbfile.add_acquisition(ts)
    with NWBHDF5IO(self.path, 'w') as io:
        io.write(self.nwbfile, cache_spec=False)
    with File(self.path, 'r') as f:
        dset = f['/acquisition/ts_name/data']
        self.assertTrue(np.all(dset[:] == a.data))
        self.assertEqual(dset.fillvalue, -1)
def test_gzip_timestamps(self):
    ts = TimeSeries('ts_name', [1, 2, 3], 'A', timestamps=H5DataIO(np.array([1., 2., 3.]), compression='gzip'))
    self.nwbfile.add_acquisition(ts)
    with NWBHDF5IO(self.path, 'w') as io:
        io.write(self.nwbfile, cache_spec=False)
    # confirm that the dataset was indeed compressed
    with File(self.path, 'r') as f:
        self.assertEqual(f['/acquisition/ts_name/timestamps'].compression, 'gzip')
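
# A sketch of the storage options H5DataIO can apply to a single dataset
# (chunk shape, compression level, and fill value below are arbitrary example
# choices, not values used by the tests above).
def _example_h5dataio_options():
    from hdmf.backends.hdf5.h5_utils import H5DataIO
    from pynwb import TimeSeries
    import numpy as np

    data = H5DataIO(np.arange(20).reshape(5, 4),
                    compression='gzip',      # gzip-compress the dataset
                    compression_opts=4,      # gzip level
                    chunks=(1, 4),           # HDF5 chunk shape
                    fillvalue=-1)            # fill value for unwritten elements
    return TimeSeries('ts_name', data, 'A', timestamps=np.arange(5.0))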
        name='test_data', shape=(None,),
        attributes=[NWBAttributeSpec(name='target_ds',
                                     doc='the target the dataset applies to',
                                     dtype=RefSpec('TimeSeries', 'object'))],
        neurodata_type_def='my_new_type')
    ns_builder.add_spec(self.ext_source, test_ds_ext)
    ns_builder.export(self.ns_path, outdir=self.tempdir)
    type_map = get_type_map(extensions=os.path.join(self.tempdir, self.ns_path))
    my_new_type = type_map.get_container_cls(self.prefix, 'my_new_type')
    docval = None
    for tmp in get_docval(my_new_type.__init__):
        if tmp['name'] == 'target_ds':
            docval = tmp
            break
    self.assertIsNotNone(docval)
    self.assertEqual(docval['type'], TimeSeries)
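
# A self-contained sketch of the extension-building flow whose tail appears above
# (namespace name, file names, and version are illustrative assumptions): define
# a dataset spec with a custom attribute, register it, and export the namespace.
def _example_build_extension(outdir='.'):
    from pynwb.spec import NWBNamespaceBuilder, NWBDatasetSpec, NWBAttributeSpec

    ns_builder = NWBNamespaceBuilder('Example extension namespace', 'ndx-example',
                                     version='0.1.0')
    ds_spec = NWBDatasetSpec(doc='an example dataset extension',
                             name='test_data', shape=(None,),
                             attributes=[NWBAttributeSpec(name='label',
                                                          doc='a free-text label',
                                                          dtype='text')],
                             neurodata_type_def='ExampleType')
    ns_builder.add_spec('ndx-example.extensions.yaml', ds_spec)
    ns_builder.export('ndx-example.namespace.yaml', outdir=outdir)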
def test_get_neurodata_type(self):
    ts1 = TimeSeries('test_ts1', [0, 1, 2, 3, 4, 5],
                     'grams', timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5])
    ts2 = TimeSeries('test_ts2', [0, 1, 2, 3, 4, 5],
                     'grams', timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5])
    self.nwbfile.add_acquisition(ts1)
    self.nwbfile.add_acquisition(ts2)
    p1 = ts1.get_ancestor(neurodata_type='NWBFile')
    self.assertIs(p1, self.nwbfile)
    p2 = ts2.get_ancestor(neurodata_type='NWBFile')
    self.assertIs(p2, self.nwbfile)
        stimulus_notes='test stimulus notes',
        data_collection='test data collection notes',
        experiment_description='test experiment description',
        institution='nomad',
        lab='nolab',
        notes='nonotes',
        pharmacology='nopharmacology',
        protocol='noprotocol',
        related_publications='nopubs',
        session_id='007',
        slices='noslices',
        source_script='nosources',
        surgery='nosurgery',
        virus='novirus',
        source_script_file_name='nofilename')
    self.ts = TimeSeries(name='test_timeseries', data=list(range(100, 200, 10)),
                         unit='SIunit', timestamps=np.arange(10.), resolution=0.1)
    self.nwbfile.add_acquisition(self.ts)
    self.ts2 = TimeSeries(name='test_timeseries2', data=list(range(200, 300, 10)),
                          unit='SIunit', timestamps=np.arange(10.), resolution=0.1)
    self.nwbfile.add_analysis(self.ts2)
    self.mod = self.nwbfile.create_processing_module('test_module', 'a test module')
    self.ts3 = TimeSeries(name='test_timeseries2', data=list(range(100, 200, 10)),
                          unit='SIunit', timestamps=np.arange(10.), resolution=0.1)
    self.mod.add(self.ts3)
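
# A compact, illustrative sketch of constructing an NWBFile directly:
# session_description, identifier, and session_start_time are required, and the
# optional metadata fields mirror the keyword arguments used in the setUp above.
def _example_nwbfile_with_metadata():
    from datetime import datetime
    from dateutil.tz import tzlocal
    from pynwb import NWBFile

    return NWBFile(session_description='example session',
                   identifier='EXAMPLE-001',
                   session_start_time=datetime.now(tzlocal()),
                   institution='nomad',
                   lab='nolab',
                   session_id='007',
                   notes='nonotes')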
def test_add_analysis(self):
    self.nwbfile.add_analysis(TimeSeries('test_ts', [0, 1, 2, 3, 4, 5],
                                         'grams', timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5]))
    self.assertEqual(len(self.nwbfile.analysis), 1)
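
# An illustrative sketch of where add_acquisition and add_analysis place a
# container: acquisition is keyed by the container name, and analysis is assumed
# here to behave the same way. Names and values are placeholders.
def _example_acquisition_vs_analysis(nwbfile):
    from pynwb import TimeSeries

    raw = TimeSeries('raw_ts', [0, 1, 2], 'grams', timestamps=[0.0, 0.1, 0.2])
    derived = TimeSeries('derived_ts', [0, 2, 4], 'grams', timestamps=[0.0, 0.1, 0.2])
    nwbfile.add_acquisition(raw)       # data acquired during the session
    nwbfile.add_analysis(derived)      # results derived from that data
    return nwbfile.acquisition['raw_ts'], nwbfile.analysis['derived_ts']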