TABLE = 'TABLE'
'''Stored as pytable_

.. _pytable: http://pytables.github.io/usersguide/libref/structured_storage.html#the-table-class

'''

FRAME = 'FRAME'
'''Stored as pandas DataFrame_

.. _DataFrame: http://pandas.pydata.org/pandas-docs/dev/io.html#hdf5-pytables

'''
TYPE_FLAG_MAPPING = {
    ObjectTable: TABLE,
    list: ARRAY,
    tuple: ARRAY,
    dict: DICT,
    np.ndarray: CARRAY,
    np.matrix: CARRAY,
    DataFrame: FRAME
}
''' Mapping from object type to storage flag'''
for item in pypetconstants.PARAMETER_SUPPORTED_DATA:
    TYPE_FLAG_MAPPING[item] = ARRAY
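# Example sketch (not part of pypet): how a type-to-flag mapping like the one
# above can be queried. The constants and the `flag_for` helper are
# illustrative stand-ins, not the actual pypet API.
import numpy as np

_ARRAY, _CARRAY, _DICT = 'ARRAY', 'CARRAY', 'DICT'
_example_mapping = {list: _ARRAY, tuple: _ARRAY, dict: _DICT,
                    np.ndarray: _CARRAY, np.matrix: _CARRAY}

def flag_for(data, default=_ARRAY):
    """Look up the storage flag registered for the exact type of `data`."""
    return _example_mapping.get(type(data), default)

assert flag_for(np.arange(3)) == 'CARRAY'
assert flag_for({'a': 1}) == 'DICT'
assert flag_for(42) == 'ARRAY'  # fallback for unmapped scalar types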
FORMATTED_COLUMN_PREFIX = 'SRVC_COLUMN_%s_'
''' Stores data type of a specific pytables column for perfect reconstruction'''
try:
    for colname in table.colnames:
        col = table.col(colname)
        data_list = list(col)

        prefix = HDF5StorageService.FORMATTED_COLUMN_PREFIX % colname
        for idx, data in enumerate(data_list):
            # Recall the original native type of the entry; if the first entry
            # is not converted, the remaining ones won't be either, so stop early.
            data, type_changed = self._all_recall_native_type(data, table, prefix)
            if type_changed:
                data_list[idx] = data
            else:
                break

        if table_name in load_dict:
            load_dict[table_name][colname] = data_list
        else:
            load_dict[table_name] = ObjectTable(data={colname: data_list})
except:
    self._logger.error('Failed loading `%s` of `%s`.' % (table._v_name, full_name))
    raise
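# Example sketch (not part of pypet): how the per-column prefix above is
# formed. Each column gets its own prefix so the original data type of its
# entries can be recorded as HDF5 attributes and recalled on loading; the
# 'scalar_type' suffix below is a hypothetical attribute name.
prefix = 'SRVC_COLUMN_%s_' % 'voltage'
assert prefix == 'SRVC_COLUMN_voltage_'
attr_name = prefix + 'scalar_type'  # e.g. an attribute holding the dtype info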
def _store(self):
    if not isinstance(self._data, Quantity):
        return super(Brian2Parameter, self)._store()
    else:
        store_dict = {}

        # Split the Quantity into a dimensionless value and its unit,
        # which is stored as a string representation.
        unit = get_unit_fast(self._data)
        value = self._data / unit
        store_dict['data' + Brian2Parameter.IDENTIFIER] = \
            ObjectTable(data={'value': [value], 'unit': [repr(unit)]})

        if self.f_has_range():
            value_list = [value_with_unit / unit for value_with_unit in self._explored_range]
            store_dict['explored_data' + Brian2Parameter.IDENTIFIER] = \
                ObjectTable(data={'value': value_list})

        self._locked = True
        return store_dict
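# Example sketch (assuming brian2 is installed): splitting a Quantity into a
# plain value and its unit, mirroring what _store() does via get_unit_fast.
from brian2 import mV, volt

data = 10 * mV
unit = volt                   # stand-in for get_unit_fast(data)
value = float(data / unit)    # 0.01 -- a dimensionless Python float
stored = {'value': [value], 'unit': [repr(unit)]}
# On loading, the unit string can be evaluated in a brian2 namespace and
# multiplied back onto the stored value to recover the original Quantity.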
def _prm_store_dict_as_table(self, msg, key, data_to_store, group, fullname):
    if key in group:
        raise ValueError('Dictionary `%s` already exists in `%s`. '
                         'Appending is not supported (yet).' % (key, fullname))

    # Wrap every value into a one-element list so the dict becomes a
    # single-row ObjectTable.
    temp_dict = {}
    for innerkey, val in data_to_store.items():
        temp_dict[innerkey] = [val]

    objtable = ObjectTable(data=temp_dict)
    self._prm_store_into_pytable(msg, key, objtable, group, fullname)

    # PyTables renamed `_f_getChild` to `_f_get_child`; support both versions.
    try:
        new_table = group._f_get_child(key)
    except AttributeError:
        new_table = group._f_getChild(key)

    self._all_set_attributes_to_recall_natives(temp_dict, new_table,
                                               HDF5StorageService.DATA_PREFIX)
    setattr(new_table._v_attrs, HDF5StorageService.STORAGE_TYPE,
            HDF5StorageService.DICT)
    self._hdf5file.flush()
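# Example sketch (not part of pypet): the core idea of _prm_store_dict_as_table
# is to wrap every dict value into a one-element list so the whole dict becomes
# a single-row, object-dtype DataFrame (which is what ObjectTable is).
import pandas as pd

my_dict = {'n_neurons': 100, 'label': 'test'}
one_row = {key: [val] for key, val in my_dict.items()}
objtable = pd.DataFrame(data=one_row, dtype=object)
print(objtable)  # one row with columns 'n_neurons' and 'label'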
def __init__(self, data=None, index=None, columns=None, copy=False):
    super(ObjectTable, self).__init__(data=data, index=index, columns=columns,
                                      dtype=object, copy=copy)
def _supports(self, item):
    """Checks if outer data structure is supported."""
    return type(item) in ((np.ndarray, ObjectTable,
                           DataFrame, Series, Panel, Panel4D,
                           dict, tuple, list, np.matrix) +
                          pypetconstants.PARAMETER_SUPPORTED_DATA)
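# Example sketch (not part of pypet): _supports() compares type(item) against a
# tuple of types, so only exact types pass; subclasses of a supported type do
# not, unlike an isinstance() check.
import numpy as np

_supported = (dict, tuple, list, np.ndarray, int, float, str)

def supports(item):
    return type(item) in _supported

class MyList(list):
    pass

assert supports([1, 2, 3]) is True
assert supports(MyList()) is False   # subclass of list, but not exactly list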
data_list, name_list, hash_tuple = self._serialize_matrix(self._data)
rename_list = ['data%s%s' % (SparseParameter.IDENTIFIER, name)
               for name in name_list]

is_dia = int(len(rename_list) == 4)
store_dict['data%sis_dia' % SparseParameter.IDENTIFIER] = is_dia

for idx, name in enumerate(rename_list):
    store_dict[name] = data_list[idx]

if self.f_has_range():
    # Supports smart storage by hashing
    smart_dict = {}

    store_dict['explored_data' + SparseParameter.IDENTIFIER] = \
        ObjectTable(columns=['idx', 'is_dia'],
                    index=list(range(len(self))))

    count = 0
    for idx, elem in enumerate(self._explored_range):
        data_list, name_list, hash_tuple = self._serialize_matrix(elem)

        # Use the hash_tuple as a key for the smart_dict
        if hash_tuple in smart_dict:
            name_idx = smart_dict[hash_tuple]
            add = False
        else:
            name_idx = count
            add = True

        is_dia = int(len(name_list) == 4)
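# Example sketch (not part of pypet): the idea behind the hash-based "smart
# storage" above -- identical matrices in the explored range are serialized
# only once, later occurrences just reference the first one by index.
smart_dict = {}
stored = []        # data actually written out
references = []    # per-element index into `stored`

for elem in ['a', 'b', 'a', 'c', 'b']:   # stand-ins for serialized matrices
    key = hash(elem)                      # stand-in for hash_tuple
    if key not in smart_dict:
        smart_dict[key] = len(stored)
        stored.append(elem)
    references.append(smart_dict[key])

assert stored == ['a', 'b', 'c']
assert references == [0, 1, 0, 2, 1]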
>>> res.ford = 'prefect'
>>> res.ford
'prefect'
:raises: TypeError:

    If the data format in args or kwargs is not known to the result. Only the
    type of the outer data structure is checked, i.e. whether you pass a list
    or a dictionary; individual values within dicts or lists are not checked.

"""
__slots__ = ('_data_',)

SUPPORTED_DATA = set((np.ndarray, ObjectTable,
                      DataFrame, Series, Panel,
                      dict, tuple, list, np.matrix) +
                     pypetconstants.PARAMETER_SUPPORTED_DATA)
def __init__(self, full_name, *args, **kwargs):
    comment = kwargs.pop('comment', '')
    super(Result, self).__init__(full_name, comment)
    self._data_ = None
    self._set_logger()
    self.f_set(*args, **kwargs)

@property
def _data(self):
    """To avoid the overhead of producing an empty dictionary"""
    if self._data_ is None:
        self._data_ = {}
    return self._data_
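# Example sketch (not part of pypet): the lazy-initialization pattern used by
# the _data property -- the backing dict is only created on first access, so
# empty results never pay for an unused dictionary.
class _Lazy(object):
    __slots__ = ('_data_',)

    def __init__(self):
        self._data_ = None

    @property
    def _data(self):
        if self._data_ is None:
            self._data_ = {}
        return self._data_

obj = _Lazy()
assert obj._data_ is None      # nothing allocated yet
obj._data['x'] = 42            # first access creates the dict
assert obj._data_ == {'x': 42}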