def test_lab_meta_auto(self):
    ns_builder = NWBNamespaceBuilder('Extension for use in my Lab', self.prefix, version='0.1.0')
    test_meta_ext = NWBGroupSpec(
        neurodata_type_def='MyTestMetaData',
        neurodata_type_inc='LabMetaData',
        doc='my test meta data',
        attributes=[
            NWBAttributeSpec(name='test_attr', dtype='float', doc='test_dtype')])
    ns_builder.add_spec(self.ext_source, test_meta_ext)
    ns_builder.export(self.ns_path, outdir=self.tempdir)
    ns_abs_path = os.path.join(self.tempdir, self.ns_path)

    load_namespaces(ns_abs_path)

    MyTestMetaData = get_class('MyTestMetaData', self.prefix)

    nwbfile = NWBFile("a file with header data", "NB123A", datetime(2017, 5, 1, 12, 0, 0, tzinfo=tzlocal()))
    nwbfile.add_lab_meta_data(MyTestMetaData(name='test_name', test_attr=5.))

def test_lab_meta(self):
    ns_builder = NWBNamespaceBuilder('Extension for use in my Lab', self.prefix)
    test_meta_ext = NWBGroupSpec(
        neurodata_type_def='MyTestMetaData',
        neurodata_type_inc='LabMetaData',
        doc='my test meta data',
        attributes=[
            NWBAttributeSpec(name='test_attr', dtype='float', doc='test_dtype')])
    ns_builder.add_spec(self.ext_source, test_meta_ext)
    ns_builder.export(self.ns_path, outdir=self.tempdir)
    ns_abs_path = os.path.join(self.tempdir, self.ns_path)

    load_namespaces(ns_abs_path)

    @register_class('MyTestMetaData', self.prefix)
    class MyTestMetaData(LabMetaData):
        __nwbfields__ = ('test_attr',)

        # The snippet was truncated after the first docval argument; the constructor
        # below is a minimal completion that mirrors test_lab_meta_auto above.
        @docval({'name': 'name', 'type': str, 'doc': 'name'},
                {'name': 'test_attr', 'type': float, 'doc': 'test attribute'})
        def __init__(self, **kwargs):
            test_attr = popargs('test_attr', kwargs)
            super().__init__(**kwargs)
            self.test_attr = test_attr

    nwbfile = NWBFile("a file with header data", "NB123A", datetime(2017, 5, 1, 12, 0, 0, tzinfo=tzlocal()))
    nwbfile.add_lab_meta_data(MyTestMetaData(name='test_name', test_attr=5.))

def test_write_cache_spec(self):
    '''
    Round-trip test for writing spec and reading it back in
    '''
    with File(self.path, 'w') as fil:
        with HDF5IO(self.path, manager=self.manager, file=fil, mode='a') as io:
            io.write(self.container)
    with File(self.path, 'r') as f:
        self.assertIn('specifications', f)

    ns_catalog = NamespaceCatalog(NWBGroupSpec, NWBDatasetSpec, NWBNamespace)
    HDF5IO.load_namespaces(ns_catalog, self.path)
    original_ns = self.manager.namespace_catalog.get_namespace('core')
    cached_ns = ns_catalog.get_namespace('core')
    self.maxDiff = None
    for key in ('author', 'contact', 'doc', 'full_name', 'name'):
        with self.subTest(namespace_field=key):
            self.assertEqual(original_ns[key], cached_ns[key])
    for dt in original_ns.get_registered_types():
        with self.subTest(neurodata_type=dt):
            original_spec = original_ns.get_spec(dt)
            cached_spec = cached_ns.get_spec(dt)
            with self.subTest(test='data_type spec read back in'):
                self.assertIsNotNone(cached_spec)
            with self.subTest(test='cached spec preserved original spec'):
                self.assertDictEqual(original_spec, cached_spec)

def test_catch_dup_name(self):
    ns_builder1 = NWBNamespaceBuilder('Extension for use in my Lab', "pynwb_test_extension1", version='0.1.0')
    ext1 = NWBGroupSpec('A custom ElectricalSeries for my lab',
                        attributes=[NWBAttributeSpec(name='trode_id', doc='the tetrode id', dtype='int')],
                        neurodata_type_inc='ElectricalSeries',
                        neurodata_type_def='TetrodeSeries')
    ns_builder1.add_spec(self.ext_source1, ext1)
    ns_builder1.export(self.ns_path1, outdir=self.tempdir)

    ns_builder2 = NWBNamespaceBuilder('Extension for use in my Lab', "pynwb_test_extension1", version='0.1.0')
    ext2 = NWBGroupSpec('A custom ElectricalSeries for my lab',
                        attributes=[NWBAttributeSpec(name='trode_id', doc='the tetrode id', dtype='int')],
                        neurodata_type_inc='ElectricalSeries',
                        neurodata_type_def='TetrodeSeries')
    ns_builder2.add_spec(self.ext_source2, ext2)
    ns_builder2.export(self.ns_path2, outdir=self.tempdir)

    # Loading a second namespace with the same name should be ignored with a warning.
    type_map = get_type_map(extensions=os.path.join(self.tempdir, self.ns_path1))
    with self.assertWarnsRegex(UserWarning, r"ignoring namespace '\S+' because it already exists"):
        type_map.load_namespaces(os.path.join(self.tempdir, self.ns_path2))

from pynwb.spec import NWBNamespaceBuilder, NWBGroupSpec, NWBAttributeSpec

ns_builder = NWBNamespaceBuilder('Extension for use in my Lab', "mylab")
ext1 = NWBGroupSpec('A custom ElectricalSeries for my lab',
                    attributes=[NWBAttributeSpec('trode_id', 'the tetrode id', 'int')],
                    neurodata_type_inc='ElectricalSeries',
                    neurodata_type_def='TetrodeSeries')
ext_source = 'fake_extension.yaml'
ns_builder.add_spec(ext_source, ext1)
ns_path = 'fake_namespace.yaml'
ns_builder.export(ns_path)
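
# A brief sketch (not part of the snippet above) of how the exported extension could
# be loaded back in and its auto-generated container class retrieved, assuming the
# two YAML files were written to the current directory.
from pynwb import load_namespaces, get_class

load_namespaces(ns_path)
TetrodeSeries = get_class('TetrodeSeries', 'mylab')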
if args.nspath:
    if args.cached_namespace:
        print("Turning off validation against cached namespace information "
              "as --nspath was passed.", file=sys.stderr)
        args.cached_namespace = False

for path in args.paths:
    if not os.path.isfile(path):
        print("The file {} does not exist.".format(path), file=sys.stderr)
        ret = 1
        continue

    if args.cached_namespace:
        catalog = NamespaceCatalog(NWBGroupSpec, NWBDatasetSpec, NWBNamespace)
        ns_deps = NWBHDF5IO.load_namespaces(catalog, path)
        s = set(ns_deps.keys())     # determine which namespaces are the most
        for k in ns_deps:           # specific (i.e. extensions) and validate
            s -= ns_deps[k].keys()  # against those
        namespaces = list(sorted(s))
        if len(namespaces) > 0:
            tm = TypeMap(catalog)
            manager = BuildManager(tm)
            specloc = "cached namespace information"
        else:
            manager = None
            namespaces = available_namespaces()
            specloc = "pynwb namespace information"
            print("The file {} has no cached namespace information. "
                  "Falling back to {}.".format(path, specloc), file=sys.stderr)
    elif args.nspath:
        catalog = NamespaceCatalog(NWBGroupSpec, NWBDatasetSpec, NWBNamespace)
        namespaces = catalog.load_namespaces(args.nspath)
        if len(namespaces) == 0:
            print("Could not load namespaces from file {}.".format(args.nspath), file=sys.stderr)
            sys.exit(1)
        tm = TypeMap(catalog)
        manager = BuildManager(tm)
        specloc = "--nspath namespace information"
    else:
        manager = None
        namespaces = available_namespaces()
        specloc = "pynwb namespace information"

    if args.ns:
        if args.ns in namespaces:
            # the original excerpt was truncated here; restrict validation to the
            # requested namespace
            namespaces = [args.ns]
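
# The command-line excerpt above only assembles the BuildManager and the list of
# namespaces to validate against. As a rough sketch (not taken from that excerpt),
# the same check can be run programmatically with pynwb's public validate() helper;
# the file name 'my_data.nwb' is hypothetical.
from pynwb import NWBHDF5IO, validate

with NWBHDF5IO('my_data.nwb', mode='r', load_namespaces=True) as io:
    errors = validate(io)  # list of validation errors against the core namespace
    for error in errors:
        print(error)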
# Each set of three points defines a triangular face, and the mesh is composed of a
# collection of triangular faces.
#
# First, we set up our extension. I am going to use the name `ecog` for it.
from pynwb.spec import NWBDatasetSpec, NWBNamespaceBuilder, NWBGroupSpec
name = 'ecog'
ns_path = name + ".namespace.yaml"
ext_source = name + ".extensions.yaml"
# Now we define the data structures. We use `NWBDataInterface` as the base type,
# which is the most primitive type you are likely to use as a base. The name of the
# class is `CorticalSurface`, and it requires two matrices, `vertices` and
# `faces`.
surface = NWBGroupSpec(doc='brain cortical surface',
                       datasets=[
                           NWBDatasetSpec(doc='faces for surface, indexes vertices', shape=(None, 3),
                                          name='faces', dtype='uint', dims=('face_number', 'vertex_index')),
                           NWBDatasetSpec(doc='vertices for surface, points in 3D space', shape=(None, 3),
                                          name='vertices', dtype='float', dims=('vertex_number', 'xyz'))],
                       neurodata_type_def='CorticalSurface',
                       neurodata_type_inc='NWBDataInterface')
# Now we set up the builder and add this object
ns_builder = NWBNamespaceBuilder(name + ' extensions', name)
ns_builder.add_spec(ext_source, surface)
ns_builder.export(ns_path)
################
# The above should generate two YAML files: `ecog.extensions.yaml` and `ecog.namespace.yaml`.
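
# As a rough sketch (not part of the original tutorial text), the exported namespace
# can then be loaded back into pynwb and the new CorticalSurface class retrieved,
# assuming the two YAML files were written to the current directory.
from pynwb import load_namespaces, get_class

load_namespaces(ns_path)
CorticalSurface = get_class('CorticalSurface', name)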