# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
datatype = h5py.special_dtype(vlen=unicode)
kwds = {}
if zlib:
# only add compression related keyword arguments if relevant (h5py
# chokes otherwise)
kwds['compression'] = 'gzip'
kwds['compression_opts'] = complevel
kwds['shuffle'] = shuffle
return super(Group, self).create_variable(
varname, dimensions, dtype=datatype, fletcher32=fletcher32,
chunks=chunksizes, fillvalue=fill_value, **kwds)
class Dataset(core.File, Group, HasAttributesMixin):
    """netCDF4-style entry point of the legacy API.

    Combines the h5netcdf ``core.File`` (file open/close, h5py handle
    management) with the legacy-API ``Group`` (netCDF4-compatible variable
    and dimension creation) and the shared attribute mixin.

    NOTE(review): ``core.File`` and ``HasAttributesMixin`` are defined
    outside this chunk -- all behavior beyond the ``_cls_name`` override
    below is inherited; confirm against the full module.
    """
    # Override the class name reported in reprs/error messages so users of
    # the legacy API see the legacyapi path rather than the core class.
    _cls_name = 'h5netcdf.legacyapi.Dataset'
self._phony_dim_count = 0
if phony_dims not in ['sort', 'access']:
raise ValueError('unknown value %r for phony_dims\n'
'Use phony_dims=%r for sorted naming, '
'phony_dims=%r for per access naming.'
% (phony_dims, 'sort', 'access'))
# These maps keep track of dimensions in terms of size (might be
# unlimited), current size (identical to size for limited dimensions),
# their position, and look-up for HDF5 datasets corresponding to a
# dimension.
self._dim_sizes = ChainMap()
self._current_dim_sizes = ChainMap()
self._dim_order = ChainMap()
self._all_h5groups = ChainMap(self._h5group)
super(File, self).__init__(self, self._h5path)
# initialize all groups to detect/create phony dimensions
# mimics netcdf-c style naming
if phony_dims == 'sort':
self._determine_phony_dimensions()