How to use the h5py.get_config function in h5py

To help you get started, we’ve selected a few h5py.get_config examples based on popular ways it is used in public projects.

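As a quick orientation before the examples, h5py.get_config() returns a configuration object whose attributes control library-wide behaviour. Below is a minimal sketch, not taken from any of the projects that follow, of the two attributes the examples use most: the read-only mpi flag and the writable complex_names pair.

import h5py

cfg = h5py.get_config()

# True only if h5py was built against a parallel (MPI-enabled) HDF5 library.
print(cfg.mpi)

# Field names used when complex values are stored as HDF5 compound types;
# the default is ('r', 'i').
print(cfg.complex_names)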

From AllenInstitute/bmtk on GitHub: bmtk/simulator/bionet/modules/record_netcons.py
import os
import csv
import h5py
import numpy as np
from neuron import h

from .sim_module import SimulatorMod
from bmtk.simulator.bionet.biocell import BioCell
# from bmtk.simulator.bionet.io_tools import io
# from bmtk.simulator.bionet.pointprocesscell import PointProcessCell
from bmtk.utils.reports import CompartmentReport

try:
    # Check to see if h5py is built to run in parallel
    if h5py.get_config().mpi:
        MembraneRecorder = CompartmentReport  # cell_vars.CellVarRecorderParallel
    else:
        MembraneRecorder = CompartmentReport  # cell_vars.CellVarRecorder
except Exception as e:
    MembraneRecorder = CompartmentReport  # cell_vars.CellVarRecorder

pc = h.ParallelContext()
MPI_RANK = int(pc.id())
N_HOSTS = int(pc.nhost())


class NetconReport(SimulatorMod):
    def __init__(self, tmp_dir, file_name, variable_name, cells, sections='all', syn_type='Exp2Syn', buffer_data=True,
                 transform={}):
        """Module used for saving NEURON cell properities at each given step of the simulation.
From pyscf/pyscf on GitHub: pyscf/lib/misc.py
import sys
import ctypes
import numpy
import h5py

from threading import Thread
from multiprocessing import Queue, Process
try:
    from concurrent.futures import ThreadPoolExecutor
except ImportError:
    ThreadPoolExecutor = None

from pyscf.lib import param
from pyscf import __config__

if h5py.version.version[:4] == '2.2.':
    sys.stderr.write('h5py-%s is found in your environment. '
                     'h5py-%s has a bug in threading mode.\n'
                     'Async-IO is disabled.\n' % ((h5py.version.version,)*2))
if h5py.version.version[:2] == '3.':
    h5py.get_config().default_file_mode = 'a'

c_double_p = ctypes.POINTER(ctypes.c_double)
c_int_p = ctypes.POINTER(ctypes.c_int)
c_null_ptr = ctypes.POINTER(ctypes.c_void_p)

def load_library(libname):
    # numpy 1.6 has a bug in ctypeslib.load_library, see numpy/distutils/misc_util.py
    if '1.6' in numpy.__version__:
        if (sys.platform.startswith('linux') or
            sys.platform.startswith('gnukfreebsd')):
            so_ext = '.so'
        elif sys.platform.startswith('darwin'):
            so_ext = '.dylib'
        elif sys.platform.startswith('win'):
            so_ext = '.dll'
        else:
From spyking-circus/spyking-circus on GitHub: circus/files/kwd.py
import numpy, re, sys, logging
from circus.shared.messages import print_and_log
from .hdf5 import H5File

import warnings
with warnings.catch_warnings():
    warnings.filterwarnings("ignore",category=FutureWarning)
    import h5py

logger = logging.getLogger(__name__)

class KwdFile(H5File):

    description    = "kwd"    
    extension      = [".kwd"]
    parallel_write = h5py.get_config().mpi
    is_streamable  = ['multi-files', 'single-file']

    _required_fields = {'sampling_rate'    : float}
    
    _default_values  = {'recording_number'  : 0,
                        'dtype_offset'      : 'auto',
                        'gain'              : 1.}

    def set_streams(self, stream_mode):
        
        if stream_mode == 'single-file':
            
            sources     = []
            to_write    = []
            count       = 0
            params      = self.get_description()
From PyTables/PyTables on GitHub: tables/backend_h5py.py
def set_complex_names():
    cfg = h5py.get_config()
    cfg.complex_names = ('real', 'imag')
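For context, complex_names controls the field names of the compound type h5py uses when storing complex arrays; the default is ('r', 'i'), and the PyTables helper above switches them to ('real', 'imag'). A minimal sketch of the effect, with a made-up file name used only for illustration:

import numpy as np
import h5py

# Match the PyTables convention before writing any complex data.
h5py.get_config().complex_names = ('real', 'imag')

# 'complex_demo.h5' is a hypothetical file name for this sketch.
with h5py.File('complex_demo.h5', 'w') as f:
    f['z'] = np.array([1 + 2j, 3 + 4j])
    # On disk, 'z' is stored as a compound dataset whose fields are now
    # named 'real' and 'imag' rather than h5py's default 'r' and 'i'.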
From spyking-circus/spyking-circus on GitHub: circus/shared/utils.py
def get_parallel_hdf5_flag(params):
    ''' Get parallel HDF5 flag.

    Argument
    --------
    params: dict
        Dictionary of parameters.

    Returns
    -------
    flag: bool
        True if parallel HDF5 is available and the user wants to use it.
    '''

    flag = h5py.get_config().mpi and params.getboolean('data', 'parallel_hdf5')

    return flag
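A flag like the one above is typically used to decide whether a file may be opened collectively with the mpio driver. Here is a minimal sketch of that pattern, assuming a parallel build of h5py, mpi4py installed, and a script launched under mpirun; the file name is hypothetical.

from mpi4py import MPI
import h5py

comm = MPI.COMM_WORLD

if h5py.get_config().mpi:
    # Every rank opens the same file collectively using the mpio driver.
    with h5py.File('parallel_demo.h5', 'w', driver='mpio', comm=comm) as f:
        dset = f.create_dataset('rank_ids', (comm.Get_size(),), dtype='i')
        # Each rank writes its own entry.
        dset[comm.Get_rank()] = comm.Get_rank()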
From spyking-circus/spyking-circus on GitHub: circus/files/nwb.py
import numpy, re, sys
from .hdf5 import H5File

import warnings
with warnings.catch_warnings():
    warnings.filterwarnings("ignore",category=FutureWarning)
    import h5py

class NWBFile(H5File):

    description    = "nwb"    
    extension      = [".nwb", ".h5", ".hdf5"]
    parallel_write = h5py.get_config().mpi
    is_writable    = True
From helmholtz-analytics/heat on GitHub: heat/core/io.py
__NETCDF_DIM_TEMPLATE = "{}_dim_{}"

__all__ = ["load", "load_csv", "save"]


try:
    import h5py
except ImportError:
    # HDF5 support is optional
    def supports_hdf5():
        return False


else:
    # warn the user about serial hdf5
    if not h5py.get_config().mpi and MPI_WORLD.rank == 0:
        warnings.warn(
            "h5py does not support parallel I/O, falling back to slower serial I/O", ImportWarning
        )

    # add functions to exports
    __all__.extend(["load_hdf5", "save_hdf5"])

    def supports_hdf5():
        return True

    def load_hdf5(path, dataset, dtype=types.float32, split=None, device=None, comm=None):
        """
        Loads data from an HDF5 file. The data may be distributed among multiple processing nodes via the split flag.

        Parameters
        ----------