Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import numpy as np
import ast
import jsonpickle
from jsonpickle.compat import unicode
__all__ = ['register_handlers', 'unregister_handlers']
class NumpyBaseHandler(jsonpickle.handlers.BaseHandler):
    """Shared helpers for flattening/restoring numpy dtype metadata."""

    def restore_dtype(self, data):
        """Rebuild a numpy dtype from the flattened ``data['dtype']`` entry.

        Structured dtypes were stored as the literal string of a list/dict,
        so they are parsed with ``ast.literal_eval``; plain dtype names go
        straight to ``np.dtype()``.
        """
        dtype = data['dtype']
        if dtype.startswith(('{', '[')):
            return ast.literal_eval(dtype)
        return np.dtype(dtype)

    def flatten_dtype(self, dtype, data):
        """Store a string representation of ``dtype`` under ``data['dtype']``."""
        if hasattr(dtype, 'tostring'):
            data['dtype'] = dtype.tostring()
        else:
            dtype = unicode(dtype)
            # Strip the "(numpy.record, ...)" wrapper on record dtypes.
            prefix = '(numpy.record, '
            if dtype.startswith(prefix):
                dtype = dtype[len(prefix):-1]
            # Bug fix: the computed string was never written back, so any
            # dtype without 'tostring' lost its 'dtype' entry entirely.
            data['dtype'] = dtype
from netzob.Common.Utils.Decorators import typeCheck
class UUIDJsonPickleHandler(jsonpickle.handlers.BaseHandler):
    """Handler used to process serialization of 'uuid' objects, as
    they are, by default, un-deserializable with jsonpickle.
    """

    def flatten(self, obj, data):
        """Flatten a uuid.UUID into a JSON-friendly dict."""
        # Bug fix: UUID.get_hex() exists only on Python 2; the 'hex'
        # property is available on both Python 2 and Python 3.
        data['hex'] = obj.hex
        return data

    def restore(self, data):
        """Recreate a uuid.UUID from its stored hex digest."""
        return uuid.UUID(hex=data['hex'])
class BitarrayJsonPickleHandler(jsonpickle.handlers.BaseHandler):
    """Handler used to process serialization of 'bitarray' objects, as
    they are, by default, un-deserializable with jsonpickle.

    (Docstring corrected: it previously said 'uuid', a copy/paste from
    the UUID handler above.)
    """

    def flatten(self, obj, data):
        # Store the bitarray as its '0'/'1' character string.
        data['value01'] = obj.to01()
        return data

    def restore(self, data):
        # bitarray() accepts the '0'/'1' string produced by to01().
        return bitarray.bitarray(data['value01'])
# Register the custom handlers so jsonpickle transparently
# (de)serializes uuid.UUID and bitarray.bitarray instances.
jsonpickle.handlers.registry.register(uuid.UUID, UUIDJsonPickleHandler)
jsonpickle.handlers.registry.register(bitarray.bitarray, BitarrayJsonPickleHandler)
class Serializer(object):
"""Class providing static methods for object serialization and
import jsonpickle
from anoncreds.protocol.types import PublicKey, RevocationPublicKey, \
SecretKey, RevocationSecretKey, AccumulatorSecretKey
from anoncreds.protocol.utils import toDictWithStrValues, fromDictWithStrValues
# jsonpickle payload key under which the integer-element state is stored.
DATA_KEY = 'py/integer-element'
class CommonIntegerElementHandler(jsonpickle.handlers.BaseHandler):
    """Base jsonpickle handler for anoncreds integer-element types.

    Subclasses select the concrete class via ``_getClass()``; flatten and
    restore round-trip the object through its string-dict representation.
    """

    def flatten(self, obj, data):
        data[DATA_KEY] = obj.toStrDict()
        return data

    def restore(self, obj):
        cls = self._getClass()
        return cls.fromStrDict(obj[DATA_KEY])

    def _getClass(self):
        # Bug fix: 'raise NotImplemented' raises a TypeError (NotImplemented
        # is not an exception class); NotImplementedError is the correct
        # abstract-method sentinel.
        raise NotImplementedError
class PublicKeyHandler(CommonIntegerElementHandler):
    # Concrete handler: (de)serializes anoncreds PublicKey objects.
    def _getClass(self):
        return PublicKey
import sys
import datetime
import collections
import jsonpickle
class DatetimeHandler(jsonpickle.handlers.BaseHandler):
    """
    Datetime objects use __reduce__, and they generate binary strings encoding
    the payload. This handler encodes that payload to reconstruct the
    object.
    """
    # Types this handler is intended to cover.
    _handles = datetime.datetime, datetime.date, datetime.time

    def flatten(self, obj, data):
        pickler = self._base
        if not pickler.unpicklable:
            # Human-readable string only, when round-tripping is disabled.
            return unicode(obj)
        cls, args = obj.__reduce__()
        flatten = pickler.flatten
        # NOTE(review): str.encode('base64') is Python-2-only; on Python 3
        # this raises LookupError. Confirm the target runtime before reuse
        # (base64.b64encode is the 3.x equivalent, but the matching restore
        # code is not visible here, so the payload format must not change
        # unilaterally).
        args = [args[0].encode('base64')] + [flatten(i, reset=False) for i in args[1:]]
        data['__reduce__'] = (flatten(cls, reset=False), args)
        return data
def add_context(obj: object, override_handlers: bool = True, key: str = None) -> None:
    """
    Adds extra metadata to the sentry log
    :param obj: Any object (non-primitive)
    :param override_handlers: Override some serialization handlers to reduce the output sent to Sentry
    :param key: Name of the object to be inserted into the context, may be None to use the classname of obj
    """
    import jsonpickle.handlers  # noqa: F811, flake8 issue with "if TYPE_CHECKING"
    from pros.conductor.templates import BaseTemplate

    class TemplateHandler(jsonpickle.handlers.BaseHandler):
        """
        Override how templates get pickled by JSON pickle - we don't want to send all of the data about a template
        from an object
        """
        from pros.conductor.templates import BaseTemplate

        def flatten(self, obj: BaseTemplate, data):
            # Keep only the identifying fields of a template.
            rv = {
                'name': obj.name,
                'version': obj.version,
                'target': obj.target,
            }
            # Optional fields, present only on some template subclasses.
            if hasattr(obj, 'location'):
                rv['location'] = obj.location
            if hasattr(obj, 'origin'):
                rv['origin'] = obj.origin
            # NOTE(review): this fragment appears truncated — no 'return rv'
            # is visible, so as written flatten would return None. Confirm
            # against the original file before relying on this code.
import json
import logging
import bitarray
#+---------------------------------------------------------------------------+
#| Related third party imports |
#+---------------------------------------------------------------------------+
import jsonpickle
#+---------------------------------------------------------------------------+
#| Local application imports |
#+---------------------------------------------------------------------------+
from netzob.Common.Utils.Decorators import typeCheck
class UUIDJsonPickleHandler(jsonpickle.handlers.BaseHandler):
    """Handler used to process serialization of 'uuid' objects, as
    they are, by default, un-deserializable with jsonpickle.
    """

    def flatten(self, obj, data):
        """Flatten a uuid.UUID into a JSON-friendly dict."""
        # Bug fix: UUID.get_hex() exists only on Python 2; the 'hex'
        # property is available on both Python 2 and Python 3.
        data['hex'] = obj.hex
        return data

    def restore(self, data):
        """Recreate a uuid.UUID from its stored hex digest."""
        return uuid.UUID(hex=data['hex'])
class BitarrayJsonPickleHandler(jsonpickle.handlers.BaseHandler):
    """Handler used to process serialization of 'bitarray' objects, as
    they are, by default, un-deserializable with jsonpickle.
    """
    # NOTE(review): docstring corrected from 'uuid' (copy/paste); the
    # method bodies of this class are not visible in this fragment.
except TypeError as e:
raise SerializationError(e)
    def dumps(self, obj):
        """Serialize *obj* with the configured pickle module at its
        highest protocol.

        :raises SerializationError: wrapping the TypeError raised by the
            pickle module for unpicklable objects.
        """
        try:
            return self.pickle_module.dumps(
                obj, self.pickle_module.HIGHEST_PROTOCOL)
        except TypeError as e:
            raise SerializationError(e)
try:
import jsonpickle
except ImportError:
__all__.remove("JSONPickler")
else:
class NumpyFloatHandler(jsonpickle.handlers.BaseHandler):
    """
    Automatic conversion of numpy float to python floats
    Required for jsonpickle to work correctly
    """
    def flatten(self, obj, data):
        """
        Converts and rounds a Numpy.float* to Python float
        """
        # Rounding to 6 decimal places is a deliberate, lossy reduction
        # of the serialized output size.
        return round(obj, 6)
# Bug fix: numpy.float was merely an alias of the builtin float; it was
# deprecated in NumPy 1.20 and removed in 1.24, where attribute access
# raises AttributeError. Register it only when it still exists.
if hasattr(numpy, 'float'):
    jsonpickle.handlers.registry.register(numpy.float, NumpyFloatHandler)
jsonpickle.handlers.registry.register(numpy.float32, NumpyFloatHandler)
jsonpickle.handlers.registry.register(numpy.float64, NumpyFloatHandler)
class NumpyIntHandler(jsonpickle.handlers.BaseHandler):
"""
# Save series as two rows rather than two cols to make preserving the
# type easier.
data = self.pp.flatten_pandas(obj.to_frame().T.to_csv(), data, meta)
return data
    def restore(self, data):
        """Rebuild a pandas Series from its flattened CSV payload.

        The Series was saved as a one-row frame (series transposed to a
        row) so that its dtype survives the CSV round trip.
        """
        csv, meta = self.pp.restore_pandas(data)
        params = make_read_csv_params(meta)
        df = pd.read_csv(StringIO(csv), **params)
        # Column 0 holds the frame's index label from to_csv(); the
        # remaining columns carry the series values and index.
        ser = pd.Series(data=df.iloc[:, 1:].values[0],
                        index=df.columns[1:].values,
                        name=meta.get('name', None))
        return ser
class PandasIndexHandler(BaseHandler):
pp = PandasProcessor()
index_constructor = pd.Index
def name_bundler(self, obj):
return {'name': obj.name}
    def flatten(self, obj, data):
        """Flatten a pandas Index: encoded value list plus dtype/name meta."""
        name_bundle = self.name_bundler(obj)
        meta = dict(dtype=str(obj.dtype), **name_bundle)
        buf = encode(obj.tolist())
        data = self.pp.flatten_pandas(buf, data, meta)
        return data
def restore(self, data):
buf, meta = self.pp.restore_pandas(data)
from jsonpickle import handlers
def register_ndarray_handler():
    # Register the hdf5-backed handler for ndarrays and the dtype handler
    # for every numpy scalar dtype it supports.
    H5BackendLinkageHandler.handles(np.ndarray)
    for t in NumpyExtractedDtypeHandler.np_dtypes:
        NumpyExtractedDtypeHandler.handles(t)
def register_all_handlers():
    # ndarray/dtype handlers plus the mdtraj Topology handler; mdtraj is
    # imported lazily here so it remains an optional dependency.
    register_ndarray_handler()
    import mdtraj
    TopologyHandler.handles(mdtraj.Topology)
class H5BackendLinkageHandler(handlers.BaseHandler):
""" stores NumPy arrays in the backing hdf5 file contained in the context """
    def __init__(self, context):
        """Validate that *context* carries the hdf5 plumbing this handler needs.

        :param context: the jsonpickle (un)pickler context; must expose
            an ``h5_file`` attribute, and picklers must also expose a
            ``next_array_id`` provider.
        :raises ValueError: when either requirement is not met.
        """
        if not hasattr(context, 'h5_file'):
            raise ValueError('the given un/-pickler has to contain a hdf5 file reference.')
        from jsonpickle.pickler import Pickler
        # Only picklers (writers) need an array-id provider.
        # NOTE(review): 'isinstance(self, Pickler)' looks suspicious — a
        # handler instance is never a Pickler, so this branch may be dead;
        # possibly 'isinstance(context, Pickler)' was intended. Confirm.
        if isinstance(self, Pickler) and not hasattr(context, 'next_array_id'):
            raise ValueError('the given pickler has to contain an array id provider')
        super(H5BackendLinkageHandler, self).__init__(context=context)
    @property
    def file(self):
        """The hdf5 file object held by the (un)pickling context."""
        # obtain the current file handler
        return self.context.h5_file
def next_array_id(self):
class ClientLog(SqlTableEntity):
    """SQLAlchemy model for the 'client_log' table: log records reported
    by clients, linked to the user that produced them."""
    __tablename__ = 'client_log'

    id = Column(Integer, primary_key=True)
    # Owning user; foreign key into user.id.
    userId = Column(ForeignKey(u'user.id'), nullable=False, index=True)
    message = Column(Text(collation=u'utf8_unicode_ci'), nullable=True)
    # Optional stack trace accompanying the message.
    trace = Column(Text(collation=u'utf8_unicode_ci'), nullable=True, default=None)
    # Severity level; database-side default is 'DEBUG'.
    mode = Column(Enum(u'DEBUG', u'INFO', u'WARNING', u'ERROR', u'CRITICAL_ERROR'), nullable=False,
                  server_default=text("'DEBUG'"))
    # NOTE(review): datetime.utcnow yields a naive timestamp — presumably
    # UTC by convention; confirm against the rest of the schema.
    createdOn = Column(DateTime, nullable=False, default=datetime.utcnow, index=True)
    user = relationship(u'User', primaryjoin='ClientLog.userId == User.id')
class MyBaseObject(handlers.BaseHandler):
    """Flatten SQLAlchemy ORM instances by copying their ``__dict__``
    and stripping the non-serializable ``_sa_instance_state`` entry,
    inlining directly-referenced ORM objects as plain dicts as well.
    """

    def flatten(self, obj, data):
        state = obj.__dict__.copy()
        for key in state:
            # Inline related ORM instances as plain dicts too.
            if isinstance(state[key], Base):
                state[key] = state[key].__dict__.copy()
                # Bug fix: use pop(..., None) instead of del so objects
                # whose __dict__ lacks _sa_instance_state do not raise
                # KeyError during serialization.
                state[key].pop('_sa_instance_state', None)
        state.pop('_sa_instance_state', None)
        return state
# Register MyBaseObject for every ORM class (base=True covers subclasses).
handlers.register(Base, MyBaseObject, base=True)

if __name__ == '__main__':
    pass

# NOTE(review): bare attribute access — this evaluates
# SqlTableEntity.metadata and discards the result; it has no effect
# unless attribute access itself has side effects. Possibly leftover.
SqlTableEntity.metadata