How to use the hdmf.backends.hdf5.HDF5IO class in hdmf

To help you get started, we’ve selected a few hdmf examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github NeurodataWithoutBorders / pynwb / tests / integration / ui_write / test_modular_storage.py View on Github external
def test_link_root(self):
        """Round-trip a container to disk, then link the read-back container
        into a second in-memory file and check it keeps its original parent.
        """
        # Build the source file and attach the acquisition container.
        src_file = NWBFile(
            session_description='a test file',
            identifier='data_file',
            session_start_time=self.start_time,
        )
        src_file.add_acquisition(self.container)

        # Persist the source file to disk.
        with HDF5IO(self.data_filename, 'w', manager=get_manager()) as writer:
            writer.write(src_file)

        # Re-open for reading and wire the read container into a new file.
        manager = get_manager()
        with HDF5IO(self.data_filename, 'r', manager=manager) as reader:
            loaded = reader.read()

            linker = NWBFile(
                session_description='a test file',
                identifier='link_file',
                session_start_time=self.start_time,
            )
            shared = loaded.acquisition[self.container.name]
            linker.add_acquisition(shared)
            # Adding to a second file must not re-parent the container.
            self.assertIs(shared.parent, loaded)
github NeurodataWithoutBorders / pynwb / tests / integration / ui_write / base.py View on Github external
def roundtripContainer(self, cache_spec=False):
        """Write a container to a file, read the file back, and return the
        container extracted from the read file.

        Args:
            cache_spec: whether to cache the namespace spec in the file
                (forwarded to ``HDF5IO.write``).

        Returns:
            The container obtained via ``self.getContainer`` from the
            freshly read NWB file.
        """
        description = 'a file to test writing and reading a %s' % self.container_type
        identifier = 'TEST_%s' % self.container_type
        nwbfile = NWBFile(description, identifier, self.start_time, file_create_date=self.create_date)
        self.addContainer(nwbfile)

        # Close the writer even if write() raises, so the HDF5 handle is
        # never leaked (the original closed it only on success).
        self.writer = HDF5IO(self.filename, manager=get_manager(), mode='w')
        try:
            self.writer.write(nwbfile, cache_spec=cache_spec)
        finally:
            self.writer.close()

        # The reader is deliberately left open on success so the returned
        # container's backing data stays accessible to the caller.
        self.reader = HDF5IO(self.filename, manager=get_manager(), mode='r')
        self.read_nwbfile = self.reader.read()

        try:
            return self.getContainer(self.read_nwbfile)
        except Exception:
            # Close and drop the reader so teardown does not reuse it, then
            # re-raise with the original traceback intact (bare `raise`
            # instead of `raise e`).
            self.reader.close()
            self.reader = None
            raise
github NeurodataWithoutBorders / pynwb / tests / integration / test_io.py View on Github external
def test_write_clobber(self):
        """Opening an existing file in 'w-' (exclusive create) mode must fail."""
        # First make sure the file exists on disk by writing through a
        # pre-opened h5py handle.
        with File(self.path, 'w') as handle:
            with HDF5IO(self.path, manager=self.manager, file=handle, mode='a') as io:
                io.write(self.container)

        # 'w-' refuses to clobber: expect UnsupportedOperation with the
        # exact message.
        expected_msg = "Unable to open file %s in 'w-' mode. File already exists." % self.path
        with self.assertRaisesWith(UnsupportedOperation, expected_msg):
            with HDF5IO(self.path, manager=self.manager, mode='w-') as io:
                pass
github NeurodataWithoutBorders / pynwb / tests / integration / test_io.py View on Github external
def test_write_cache_spec(self):
        '''
        Round-trip test for writing spec and reading it back in
        '''
        # Write the container; afterwards the file should contain a
        # '/specifications' group holding the cached namespace specs.
        with HDF5IO(self.path, manager=self.manager, mode="a") as io:
            io.write(self.container)
        with File(self.path, 'r') as f:
            self.assertIn('specifications', f)

        # Load the cached namespaces from the file into a fresh catalog and
        # compare against the original 'core' namespace.
        ns_catalog = NamespaceCatalog(NWBGroupSpec, NWBDatasetSpec, NWBNamespace)
        HDF5IO.load_namespaces(ns_catalog, self.path)
        original_ns = self.manager.namespace_catalog.get_namespace('core')
        cached_ns = ns_catalog.get_namespace('core')
        self.maxDiff = None
        # Top-level namespace metadata must round-trip field by field.
        for key in ('author', 'contact', 'doc', 'full_name', 'name'):
            with self.subTest(namespace_field=key):
                self.assertEqual(original_ns[key], cached_ns[key])
        # Per-type specs: fetch original and cached spec for each registered type.
        for dt in original_ns.get_registered_types():
            with self.subTest(neurodata_type=dt):
                original_spec = original_ns.get_spec(dt)
                cached_spec = cached_ns.get_spec(dt)
                # NOTE(review): snippet appears truncated here — the two specs
                # are fetched but never compared; presumably an assertEqual
                # follows in the full source.
github NeurodataWithoutBorders / pynwb / tests / integration / hdf5 / test_nwbfile.py View on Github external
def test_read(self):
        """ Test reading the NWBFile using HDF5IO """
        # Write the fixture NWB file first.
        writer = HDF5IO(self.filename, manager=self.manager, mode='w')
        writer.write(self.nwbfile)
        writer.close()

        # Read it back and verify the top-level structure.
        reader = HDF5IO(self.filename, manager=self.manager, mode='r')
        container = reader.read()
        self.assertIsInstance(container, NWBFile)
        self.assertEqual(len(container.acquisition), 1)
        self.assertEqual(len(container.analysis), 1)
        for acquired in container.acquisition.values():
            self.assertIsInstance(acquired, TimeSeries)
        self.assertContainerEqual(container, self.nwbfile)
        reader.close()
github NeurodataWithoutBorders / pynwb / tests / integration / ui_write / base.py View on Github external
def roundtripContainer(self, cache_spec=False):
        """Write a container to a file, read the file back, and return the
        container extracted from the read file.

        Args:
            cache_spec: whether to cache the namespace spec in the file
                (forwarded to ``HDF5IO.write``).

        Returns:
            The container obtained via ``self.getContainer`` from the
            freshly read NWB file.
        """
        description = 'a file to test writing and reading a %s' % self.container_type
        identifier = 'TEST_%s' % self.container_type
        nwbfile = NWBFile(description, identifier, self.start_time, file_create_date=self.create_date)
        self.addContainer(nwbfile)

        # Close the writer even if write() raises, so the HDF5 handle is
        # never leaked (the original closed it only on success).
        self.writer = HDF5IO(self.filename, manager=get_manager(), mode='w')
        try:
            self.writer.write(nwbfile, cache_spec=cache_spec)
        finally:
            self.writer.close()

        # The reader is deliberately left open on success so the returned
        # container's backing data stays accessible to the caller.
        self.reader = HDF5IO(self.filename, manager=get_manager(), mode='r')
        self.read_nwbfile = self.reader.read()

        try:
            return self.getContainer(self.read_nwbfile)
        except Exception:
            # Close and drop the reader so teardown does not reuse it, then
            # re-raise with the original traceback intact (bare `raise`
            # instead of `raise e`).
            self.reader.close()
            self.reader = None
            raise
github NeurodataWithoutBorders / pynwb / tests / integration / ui_write / test_nwbfile.py View on Github external
def test_read(self):
        """Write in append mode, re-open, and verify the read-back contents."""
        io_out = HDF5IO(self.path, manager=self.manager, mode='a')
        io_out.write(self.container)
        io_out.close()

        io_in = HDF5IO(self.path, manager=self.manager, mode='a')
        container = io_in.read()
        self.assertIsInstance(container, NWBFile)
        acquired = container.acquisition
        self.assertEqual(len(acquired), 1)
        self.assertEqual(len(container.analysis), 1)
        for series in acquired.values():
            self.assertIsInstance(series, TimeSeries)
        io_in.close()
github NeurodataWithoutBorders / pynwb / tests / integration / ui_write / test_modular_storage.py View on Github external
def test_link_root(self):
        """Write a data file, read it back, link its acquisition container
        into a second file, and write that link file.
        """
        # create and write data file
        data_file = NWBFile(
            session_description='a test file',
            identifier='data_file',
            session_start_time=self.start_time
        )
        data_file.add_acquisition(self.container)

        with HDF5IO(self.data_filename, 'w', manager=get_manager()) as data_write_io:
            data_write_io.write(data_file)

        # read data file
        manager = get_manager()
        with HDF5IO(self.data_filename, 'r', manager=manager) as data_read_io:
            data_file_obt = data_read_io.read()

            link_file = NWBFile(
                session_description='a test file',
                identifier='link_file',
                session_start_time=self.start_time
            )
            # Adding the read container to a second file must not re-parent
            # it: its parent stays the file it was read from.
            link_container = data_file_obt.acquisition[self.container.name]
            link_file.add_acquisition(link_container)
            self.assertIs(link_container.parent, data_file_obt)

            # Write the link file while the data file's IO is still open,
            # sharing the same manager — presumably so links back into the
            # data file can be resolved (TODO confirm against HDF5IO docs).
            with HDF5IO(self.link_filename, 'w', manager=manager) as link_write_io:
                link_write_io.write(link_file)

        # read the link file, check container sources
        # NOTE(review): snippet is truncated here — the body of the `with`
        # block below is missing from the scraped source.
        with HDF5IO(self.link_filename, 'r+', manager=get_manager()) as link_file_reader:
github NeurodataWithoutBorders / pynwb / tests / integration / ui_write / test_modular_storage.py View on Github external
def roundtripContainer(self):
        """Write a data file, then build a link file whose TimeSeries data
        references the data stored in the first file.
        """
        # create and write data file
        data_file = NWBFile(
            session_description='a test file',
            identifier='data_file',
            session_start_time=self.start_time
        )
        data_file.add_acquisition(self.container)

        with HDF5IO(self.data_filename, 'w', manager=get_manager()) as data_write_io:
            data_write_io.write(data_file)

        # read data file
        # NOTE(review): the read IO is bound to self.data_read_io, presumably
        # so teardown or later assertions can reach it — TODO confirm.
        with HDF5IO(self.data_filename, 'r', manager=get_manager()) as self.data_read_io:
            data_file_obt = self.data_read_io.read()

            # write "link file" with timeseries.data that is an external link to the timeseries in "data file"
            # also link timeseries.timestamps.data to the timeseries.timestamps in "data file"
            with HDF5IO(self.link_filename, 'w', manager=get_manager()) as link_write_io:
                link_file = NWBFile(
                    session_description='a test file',
                    identifier='link_file',
                    session_start_time=self.start_time
                )
                # NOTE(review): snippet is truncated here — the TimeSeries
                # constructor call is cut off in the scraped source.
                self.link_container = TimeSeries(
                    name='test_mod_ts',
github NeurodataWithoutBorders / pynwb / src / pynwb / __init__.py View on Github external
@docval({'name': 'io', 'type': HDMFIO,
         'doc': 'the HDMFIO object to read from'},
        {'name': 'namespace', 'type': str,
         'doc': 'the namespace to validate against', 'default': CORE_NAMESPACE},
        returns="errors in the file", rtype=list,
        is_method=False)
def validate(**kwargs):
    """Validate an NWB file against a namespace.

    Reads the builder tree from the given IO object and checks it against
    the named namespace, returning the list of validation errors found.
    """
    io, namespace = getargs('io', 'namespace', kwargs)
    builder = io.read_builder()
    ns_obj = io.manager.namespace_catalog.get_namespace(name=namespace)
    return ValidatorMap(ns_obj).validate(builder)


class NWBHDF5IO(_HDF5IO):

    @docval({'name': 'path', 'type': str, 'doc': 'the path to the HDF5 file'},
            {'name': 'mode', 'type': str,
             'doc': 'the mode to open the HDF5 file with, one of ("w", "r", "r+", "a", "w-")'},
            {'name': 'load_namespaces', 'type': bool,
             'doc': 'whether or not to load cached namespaces from given path', 'default': False},
            {'name': 'manager', 'type': BuildManager, 'doc': 'the BuildManager to use for I/O', 'default': None},
            {'name': 'extensions', 'type': (str, TypeMap, list),
             'doc': 'a path to a namespace, a TypeMap, or a list consisting paths \
             to namespaces and TypeMaps', 'default': None},
            {'name': 'file', 'type': h5py.File, 'doc': 'a pre-existing h5py.File object', 'default': None},
            {'name': 'comm', 'type': "Intracomm", 'doc': 'the MPI communicator to use for parallel I/O',
             'default': None})
    def __init__(self, **kwargs):
        path, mode, manager, extensions, load_namespaces, file_obj, comm =\
            popargs('path', 'mode', 'manager', 'extensions', 'load_namespaces', 'file', 'comm', kwargs)