How to use the pynwb.NWBHDF5IO class in pynwb

To help you get started, we’ve selected a few pynwb examples based on popular ways NWBHDF5IO is used in public projects.
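Before the project snippets, here is a minimal roundtrip sketch; the file name and contents are hypothetical, but it shows the two patterns that recur below: writing with a context manager and reading the file back while it is still open.

from datetime import datetime
from pynwb import NWBFile, NWBHDF5IO, TimeSeries

# Build a small in-memory NWBFile (hypothetical metadata).
nwbfile = NWBFile(session_description='demo session',
                  identifier='demo-001',
                  session_start_time=datetime.now().astimezone())
nwbfile.add_acquisition(TimeSeries(name='demo_timeseries',
                                   data=[1.0, 2.0, 3.0],
                                   unit='volts',
                                   rate=1.0))

# Write with a context manager so the file is closed automatically.
with NWBHDF5IO('demo.nwb', mode='w') as io:
    io.write(nwbfile)

# Read it back; access the (lazily loaded) data while the file is still open.
with NWBHDF5IO('demo.nwb', mode='r') as io:
    read_nwbfile = io.read()
    print(read_nwbfile.acquisition['demo_timeseries'].data[:])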


github NeurodataWithoutBorders / pynwb / tests / integration / hdf5 / test_scratch.py
def roundtrip_scratch(self, data, case, **kwargs):
        self.filename = 'test_scratch_%s.nwb' % case
        description = 'a file to test writing and reading a scratch data of type %s' % case
        identifier = 'TEST_scratch_%s' % case
        nwbfile = NWBFile(description, identifier, self.start_time, file_create_date=self.create_date)
        nwbfile.add_scratch(data, name='foo', notes='test scratch', **kwargs)

        self.writer = NWBHDF5IO(self.filename, mode='w')
        self.writer.write(nwbfile)
        self.writer.close()

        self.reader = NWBHDF5IO(self.filename, mode='r')
        self.read_nwbfile = self.reader.read()
        return self.read_nwbfile.get_scratch('foo')
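Outside the test harness, the same scratch roundtrip might look like the sketch below; the file name, metadata, and data are illustrative.

from datetime import datetime
import numpy as np
from pynwb import NWBFile, NWBHDF5IO

nwbfile = NWBFile('scratch demo', 'scratch-001', datetime.now().astimezone())
nwbfile.add_scratch(np.arange(10), name='foo', notes='test scratch')

with NWBHDF5IO('scratch_demo.nwb', mode='w') as io:
    io.write(nwbfile)

reader = NWBHDF5IO('scratch_demo.nwb', mode='r')
read_nwbfile = reader.read()
print(read_nwbfile.get_scratch('foo'))  # the array written above comes back out
reader.close()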
github NeurodataWithoutBorders / pynwb / src / pynwb / testing / make_test_files.py
def _write(test_name, nwbfile):
    filename = 'tests/back_compat/%s_%s.nwb' % (__version__, test_name)

    with NWBHDF5IO(filename, 'w') as io:
        io.write(nwbfile)

    with NWBHDF5IO(filename, 'r') as io:
        validate(io)
        nwbfile = io.read()
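The same write-then-validate pattern can be applied to any existing file. A hedged sketch follows; the path is hypothetical, and validate(io) returns a list of validation errors, empty when the file is valid.

from pynwb import NWBHDF5IO, validate

with NWBHDF5IO('my_file.nwb', 'r') as io:
    errors = validate(io)
    if errors:
        print('validation errors found:', errors)
    else:
        print('my_file.nwb passes validation')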
github flatironinstitute / CaImAn / caiman / source_extraction / cnmf / estimates.py
emission_lambda=emission_lambda)
            nwbfile.create_imaging_plane(name='ImagingPlane',
                                         optical_channel=optical_channel,
                                         description=imaging_plane_description,
                                         device=device,
                                         excitation_lambda=excitation_lambda,
                                         imaging_rate=imaging_rate,
                                         indicator=indicator,
                                         location=location)
            if raw_data_file:
                nwbfile.add_acquisition(ImageSeries(name='TwoPhotonSeries',
                                                    external_file=[raw_data_file],
                                                    format='external',
                                                    rate=imaging_rate,
                                                    starting_frame=[0]))
            with NWBHDF5IO(filename, 'w') as io:
                io.write(nwbfile)

        time.sleep(4)  # ensure the file is fully closed before opening again in append mode
        logging.info('Saving the results in the NWB file...')

        with NWBHDF5IO(filename, 'r+') as io:
            nwbfile = io.read()
            # Add processing results

            # Create the module as 'ophys' unless it is taken and append 'ophysX' instead
            ophysmodules = [s[5:] for s in list(nwbfile.modules) if s.startswith('ophys')]
            if any('' in s for s in ophysmodules):
                if any([s for s in ophysmodules if s.isdigit()]):
                    nummodules = max([int(s) for s in ophysmodules if s.isdigit()])+1
                    print('ophys module previously created, writing to ophys'+str(nummodules)+' instead')
                    mod = nwbfile.create_processing_module('ophys'+str(nummodules), 'contains caiman estimates for '
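The snippet above is truncated, but the append pattern it relies on is simple: reopen the file in 'r+' mode, read it, add a processing module holding the new results, and write the same NWBFile back. A stripped-down sketch of that pattern (file, module, and series names are illustrative, not taken from CaImAn):

from pynwb import NWBHDF5IO, TimeSeries

with NWBHDF5IO('my_file.nwb', 'r+') as io:
    nwbfile = io.read()
    # create a module for processed results and add a small series to it
    mod = nwbfile.create_processing_module('ophys', 'processed optical physiology data')
    mod.add(TimeSeries(name='dff', data=[0.1, 0.2, 0.3], unit='a.u.', rate=30.0))
    io.write(nwbfile)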
github NeurodataWithoutBorders / pynwb / docs / gallery / general / iterative_write.py
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#

data.resize((8, 10))    # <-- Allocate the space we need
data[0:3, :] = 1        # <-- Write timesteps 0,1,2
data[3:6, :] = 2        # <-- Write timesteps 3,4,5. Note that timesteps 6,7 are not initialized
io.close()              # <-- Close the file


####################
# Check the results
# ^^^^^^^^^^^^^^^^^

from pynwb import NWBHDF5IO    # noqa

io = NWBHDF5IO('basic_alternative_custom_write.nwb', mode='a')
nwbfile = io.read()
data = nwbfile.get_acquisition('synthetic_timeseries').data
print(data[:])
io.close()
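Because the returned data object is an HDF5 dataset, it can also be sliced lazily instead of loading everything with data[:]. A sketch reopening the same file read-only:

from pynwb import NWBHDF5IO

io = NWBHDF5IO('basic_alternative_custom_write.nwb', mode='r')
nwbfile = io.read()
data = nwbfile.get_acquisition('synthetic_timeseries').data
print(data.shape)      # shape and dtype are available without reading the values
print(data[0:3, :])    # read only the first three timesteps from disk
io.close()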
github SpikeInterface / spikeextractors / spikeextractors / extractors / nwbextractors / nwbextractors.py
# todo
        raise NotImplementedError()
        check_nwb_install()
        if not isinstance(unit_id, int):
            raise TypeError("'unit_id' must be an integer")
        existing_ids = self.get_unit_ids()
        if not isinstance(feature_name, str):
            raise TypeError("'feature_name' must be a string")
        if 'spike_feature_' + feature_name in self.get_shared_unit_spike_feature_names():
            raise ValueError('spike_feature_' + feature_name + " feature already exists")

        if default_value is None:
            default_value = np.nan
        nspikes_units = self.get_nspikes()
        new_values = [[default_value] * nSpikes for nSpikes in nspikes_units]
        with NWBHDF5IO(self._path, 'a') as io:
            nwbfile = io.read()
            for id in unit_ids:
                spikes_unit = nwbfile.units['spike_times'][existing_ids.index(id)]
                if len(spikes_unit) != len(values[str(id)]):
                    io.close()
                    raise ValueError("feature values should have the same length" +
                                     " as the spike train, error at unit #" + str(id))
                new_values[existing_ids.index(id)] = values[str(id)]

            flatten_new_values = [item for sublist in new_values for item in sublist]
            spikes_index = np.cumsum(nspikes_units)
            nwbfile.add_unit_column(name='spike_feature_' + feature_name,
                                    description='no description',
                                    data=flatten_new_values,
                                    index=spikes_index)
            io.write(nwbfile)
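For a simpler, non-ragged case, adding one value per unit to an existing units table follows the same open-in-append, modify, write-back pattern. A hedged sketch (the file name and column are hypothetical, and the file is assumed to already contain a units table):

from pynwb import NWBHDF5IO

with NWBHDF5IO('my_sorting.nwb', 'a') as io:
    nwbfile = io.read()
    n_units = len(nwbfile.units)   # assumes a units table is present
    nwbfile.add_unit_column(name='quality',
                            description='manual curation label',
                            data=['good'] * n_units)
    io.write(nwbfile)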
github NeurodataWithoutBorders / pynwb / docs / gallery / domain / ophys.py
# Writing (and reading) is carried out using :py:class:`~pynwb.NWBHDF5IO`.

from pynwb import NWBHDF5IO

with NWBHDF5IO('ophys_example.nwb', 'w') as io:
    io.write(nwbfile)

####################
# Reading an NWBFile
# ------------------
#
# Reading is carried out using the :py:class:`~pynwb.NWBHDF5IO` class. Unlike with writing, using
# :py:class:`~pynwb.NWBHDF5IO` as a context manager is generally not useful for reading, because the
# lazily loaded data can no longer be accessed once the context closes [#]_.


io = NWBHDF5IO('ophys_example.nwb', 'r')
nwbfile = io.read()


####################
# Getting your data out
# ---------------------
#
# After you read the NWB file, you can access individual components of your data file.
# To get the :py:class:`~pynwb.base.ProcessingModule` back, you can index into the
# :py:func:`~pynwb.file.NWBFile.processing` attribute with the name of the
# :py:class:`~pynwb.base.ProcessingModule`.


mod = nwbfile.processing['ophys']

####################
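From the module you can index into its data interfaces by name. A sketch of digging further in, assuming the default container names from the pynwb ophys tutorial ('ImageSegmentation' and 'Fluorescence'), which may differ in your file:

img_seg = mod['ImageSegmentation']
plane_seg = img_seg.get_plane_segmentation()   # the single PlaneSegmentation table
print(plane_seg['image_mask'][0])              # mask of the first ROI

fluorescence = mod['Fluorescence']
rrs = fluorescence.get_roi_response_series()   # the single RoiResponseSeries
print(rrs.data[:])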
github NeurodataWithoutBorders / pynwb / docs / gallery / examples / ca_imaging_example.py
)

#####################
# only add the top level container?
ophys_module.add_container(dof)

#####################
# now write the file
from pynwb import NWBHDF5IO as HDF5IO
io = HDF5IO('ophys_badfile.nwb', mode='w')
io.write(nwb)
io.close()

#####################
# and try to read it back out
reader = HDF5IO('ophys_badfile.nwb', mode='r')
read_data = reader.read()
github SpikeInterface / spikeextractors / spikeextractors / extractors / nwbextractors / nwbextractors.py
def get_epoch_names(self):
        '''This function returns a list of all the epoch names in the NWB file.

        Returns
        ----------
        epoch_names: list
            List of epoch names in the sorting extractor
        '''
        check_nwb_install()
        with NWBHDF5IO(self._path, 'r') as io:
            nwbfile = io.read()
            if nwbfile.epochs is None:
                print("No epochs in NWB file")
                return
            return [x[0] for x in nwbfile.epochs['tags'][:]]
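If you prefer to inspect the epochs table directly, nwbfile.epochs is a TimeIntervals table that can be converted to a pandas DataFrame. A sketch with a hypothetical file name:

from pynwb import NWBHDF5IO

with NWBHDF5IO('my_recording.nwb', 'r') as io:
    nwbfile = io.read()
    if nwbfile.epochs is not None:
        df = nwbfile.epochs.to_dataframe()
        print(df)   # includes start_time, stop_time and, if present, tags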
github NeurodataWithoutBorders / pynwb / docs / gallery / general / file.py
with NWBHDF5IO('basic_example.nwb', 'w') as io:
    io.write(nwbfile)

####################
# .. _basic_reading:
#
# Reading an NWB file
# -------------------
#
# As with writing, reading is also carried out using the :py:class:`~pynwb.NWBHDF5IO` class.
# To read the NWB file we just wrote, construct another :py:class:`~pynwb.NWBHDF5IO` object,
# and use the :py:func:`~pynwb.form.backends.io.FORMIO.read` method to retrieve an
# :py:class:`~pynwb.file.NWBFile` object.

io = NWBHDF5IO('basic_example.nwb', mode='r')
nwbfile = io.read()

####################
# For reading, we cannot use :py:class:`~pynwb.NWBHDF5IO` as a context manager, since the resulting
# :py:class:`~pynwb.NWBHDF5IO` gets closed and deleted when the context completes [#]_.
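#
# As a hedged aside (not part of the tutorial itself), the sketch below illustrates the
# implication: data are loaded lazily, so containers read inside a ``with`` block can no
# longer fetch their values from disk once the block closes. A throwaway handle is used
# here; the ``io`` opened above stays open for the rest of this tutorial.

with NWBHDF5IO('basic_example.nwb', 'r') as io2:
    acquisition_names = list(io2.read().acquisition)  # fine: the file is still open
print(acquisition_names)  # plain Python strings survive; lazy datasets would not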

####################
# .. _basic_retrieving_data:
#
# Retrieving data from an NWB file
# --------------------------------
#
# Most of the methods we used above to write data are paired with a getter method for getting your data back.
#
# Let's start with the :py:class:`~pynwb.base.TimeSeries` object we wrote. Above, we added it as
# acquisition data using the method :py:func:`~pynwb.file.NWBFile.add_acquisition`. We can get it
# back out using :py:func:`~pynwb.file.NWBFile.get_acquisition`.