How to use the jsonpickle.pickler.Pickler class in jsonpickle

To help you get started, we’ve selected a few jsonpickle.pickler.Pickler examples based on popular ways it is used in public projects.

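A quick sketch of the basic round trip before the real-world examples below, using a hypothetical Point class purely for illustration: Pickler.flatten() turns an object into a JSON-compatible structure, and Unpickler.restore() rebuilds the object from it.

import jsonpickle.pickler
import jsonpickle.unpickler


class Point:
    """Hypothetical example class, used only for illustration."""
    def __init__(self, x, y):
        self.x = x
        self.y = y


pickler = jsonpickle.pickler.Pickler()
unpickler = jsonpickle.unpickler.Unpickler()

# flatten() produces a JSON-ready dict; by default it includes a
# py/object tag so the original type can be reconstructed later.
flat = pickler.flatten(Point(1, 2))
print(flat)  # e.g. {'py/object': '__main__.Point', 'x': 1, 'y': 2}

# restore() rebuilds the original object from the flattened form.
point = unpickler.restore(flat)
print(point.x, point.y)  # 1 2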

github jsonpickle / jsonpickle / tests / datetime_tests.py
def test_unpickleable(self):
        """
        If unpicklable=False is set on the Pickler, date objects should be
        flattened to simple, human-readable strings.
        """
        obj = datetime.datetime.now()
        pickler = jsonpickle.pickler.Pickler(unpicklable=False)
        flattened = pickler.flatten(obj)
        self.assertEqual(str(obj), flattened)
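The unpicklable flag controls whether flatten() keeps the metadata needed to rebuild the object later. A rough sketch of the difference (exact output varies between jsonpickle versions):

import datetime

import jsonpickle.pickler

obj = datetime.datetime(2020, 1, 2, 3, 4, 5)

# With the default unpicklable=True, flatten() keeps type information
# (a py/object tag plus encoded constructor state) so the datetime can
# be restored later.
print(jsonpickle.pickler.Pickler().flatten(obj))

# With unpicklable=False, that metadata is dropped and the datetime is
# flattened to a plain, human-readable string, as the test above asserts.
print(jsonpickle.pickler.Pickler(unpicklable=False).flatten(obj))
# '2020-01-02 03:04:05'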
github jsonpickle / jsonpickle / tests / datetime_tests.py
def setUp(self):
        self.pickler = jsonpickle.pickler.Pickler()
        self.unpickler = jsonpickle.unpickler.Unpickler()
github Almenon / AREPL-electron / src / python / pythonEvaluator.py
from math import isnan

import jsonpickle.pickler


class execArgs(object):
    def __init__(self, savedCode, evalCode, *args, **kwargs):
        self.savedCode = savedCode
        self.evalCode = evalCode
        #self.action = action

class returnInfoJson(object):
    def __init__(self, ERROR, userVariables, execTime, totalTime, *args, **kwargs):
        self.ERROR = ERROR
        self.userVariables = userVariables
        self.execTime = execTime
        self.totalTime = totalTime


class customPickler(jsonpickle.pickler.Pickler):
    """
    encodes float values like inf / nan as strings to follow JSON spec while keeping meaning
    Im doing this in custom class because handlers do not fire for floats
    """
    inf = float('inf')
    negativeInf = float('-inf')

    def _get_flattener(self, obj):
        # Intercept exact floats before the base Pickler treats them as
        # plain primitives; handlers are not consulted for float values.
        if type(obj) is float:
            if obj == self.inf:
                return lambda obj: 'Infinity'
            if obj == self.negativeInf:
                return lambda obj: '-Infinity'
            if isnan(obj):
                return lambda obj: 'NaN'
        return super(customPickler, self)._get_flattener(obj)
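A minimal usage sketch for the subclass above, assuming the jsonpickle version this code targets (one where float values are routed through _get_flattener): the special floats come out as the string forms that plain JSON can carry.

p = customPickler()
print(p.flatten(float('inf')))   # 'Infinity'
print(p.flatten(float('-inf')))  # '-Infinity'
print(p.flatten(float('nan')))   # 'NaN'
print(p.flatten(1.5))            # 1.5 -- ordinary floats are untouched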
github purduesigbots / pros-cli / pros / common / sentry.py
}
            if hasattr(obj, 'location'):
                rv['location'] = obj.location
            if hasattr(obj, 'origin'):
                rv['origin'] = obj.origin
            return rv

        def restore(self, obj):
            raise NotImplementedError

    if override_handlers:
        jsonpickle.handlers.register(BaseTemplate, TemplateHandler, base=True)

    from sentry_sdk import configure_scope
    with configure_scope() as scope:
        scope.set_extra((key or obj.__class__.__qualname__), jsonpickle.pickler.Pickler(unpicklable=False).flatten(obj))

    if override_handlers:
        jsonpickle.handlers.unregister(BaseTemplate)
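Here only a readable snapshot is needed for the error report, not something restorable, so unpicklable=False is used. A rough sketch of the kind of payload that produces, with a hypothetical Template class standing in for the real one:

import jsonpickle.pickler


class Template:
    """Hypothetical stand-in for the template object being reported."""
    def __init__(self, name, version):
        self.name = name
        self.version = version


# unpicklable=False strips the py/object metadata, leaving a plain dict
# that reads cleanly in the error-tracking UI.
extra = jsonpickle.pickler.Pickler(unpicklable=False).flatten(Template('clean-project', '1.0.1'))
print(extra)  # e.g. {'name': 'clean-project', 'version': '1.0.1'}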
github girder / girder_worker / girder_worker / app.py
routing_key=routing_key,
                                               headers=headers,
                                               properties=properties,
                                               declare=declare,
                                               retry_policy=retry_policy,
                                               **kwargs)
        if 'girder_result_hooks' in headers:
            if job is not None:
                for result_hook in headers['girder_result_hooks']:
                    if isinstance(result_hook, ResultTransform):
                        result_hook.job = job

            # Celery task headers are not automatically serialized by Celery
            # before being passed off to AMQP for byte packing, so we have to
            # do that here.
            p = jsonpickle.pickler.Pickler()
            headers['girder_result_hooks'] = \
                [p.flatten(grh) for grh in headers['girder_result_hooks']]

        # Finally, remove all reserved_options from headers
        for key in Task.reserved_options:
            headers.pop(key, None)
    except Exception:
        logger.exception('An error occurred in girder_before_task_publish.')
        raise
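A hedged sketch of the idea behind that block, using a hypothetical ResultHook class instead of girder_worker's real hook types: flatten the objects into JSON-safe structures before the broker byte-packs the headers, and restore them on the consuming side.

import jsonpickle.pickler
import jsonpickle.unpickler


class ResultHook:
    """Hypothetical stand-in for a result-hook object."""
    def __init__(self, path):
        self.path = path


headers = {'girder_result_hooks': [ResultHook('/tmp/out-0'), ResultHook('/tmp/out-1')]}

# Producer side: flatten the hooks so the header values are plain,
# JSON-compatible dicts before they are handed to the broker.
p = jsonpickle.pickler.Pickler()
headers['girder_result_hooks'] = [p.flatten(grh) for grh in headers['girder_result_hooks']]

# Consumer side: rebuild the hook objects from the flattened dicts.
u = jsonpickle.unpickler.Unpickler()
hooks = [u.restore(grh) for grh in headers['girder_result_hooks']]
print(hooks[0].path)  # '/tmp/out-0'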
github thundra-io / thundra-lambda-agent-python / thundra / plugins / trace / traceable.py
def __serialize_value__(self, value):
        if self.__is_serializable__(value):
            return value
        elif isinstance(value, Serializable):
            return value.serialize()
        try:
            pickler = jsonpickle.pickler.Pickler(max_depth=3)
            value_dict = pickler.flatten(value, reset=True)
            return value_dict
        except Exception:
            # Fall back to an empty string if the value cannot be flattened.
            return ''
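max_depth bounds how far flatten() recurses into nested objects, which keeps traced values small; anything past the limit is summarized (typically as its repr()) rather than fully expanded. A rough sketch of the effect:

import jsonpickle.pickler


class Node:
    """Hypothetical nested structure, used only for illustration."""
    def __init__(self, name, child=None):
        self.name = name
        self.child = child


deep = Node('a', Node('b', Node('c', Node('d'))))

# reset=True clears internal reference-tracking state between calls,
# which is why the tracing code passes it on every flatten().
pickler = jsonpickle.pickler.Pickler(max_depth=3)
shallow = pickler.flatten(deep, reset=True)
print(shallow)  # Nodes beyond the depth limit appear as summary strings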
github TUM-DAML / seml / seml / evaluation.py
import json

import jsonpickle


def parse_jsonpickle(db_entry):
    import jsonpickle.ext.numpy as jsonpickle_numpy

    jsonpickle_numpy.register_handlers()
    try:
        p = jsonpickle.pickler.Pickler(keys=False)
        parsed = jsonpickle.loads(json.dumps(db_entry, default=p.flatten), keys=False)
    except IndexError:
        parsed = db_entry
    return parsed
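The trick above is using Pickler.flatten as json.dumps's default hook: any value the stdlib encoder cannot handle is flattened into a JSON-compatible structure first, and jsonpickle.loads then rebuilds the objects. A minimal sketch of the same pattern with a hypothetical Result class:

import json

import jsonpickle
import jsonpickle.pickler


class Result:
    """Hypothetical example class, used only for illustration."""
    def __init__(self, score):
        self.score = score


entry = {'name': 'exp-1', 'result': Result(0.93)}

# json.dumps cannot serialize Result on its own; default=p.flatten lets
# the Pickler flatten such values into JSON-compatible dicts on the fly.
p = jsonpickle.pickler.Pickler(keys=False)
encoded = json.dumps(entry, default=p.flatten)

# jsonpickle.loads rebuilds the Result instance from its py/object tag.
decoded = jsonpickle.loads(encoded, keys=False)
print(decoded['result'].score)  # 0.93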
github microsoft / botbuilder-python / libraries / botbuilder-core / botbuilder / core / bot_state.py
def compute_hash(self, obj: object) -> str:
        return str(Pickler().flatten(obj))
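str(Pickler().flatten(obj)) serves as a cheap fingerprint of the object's flattened state; comparing fingerprints taken before and after an update reveals whether anything actually changed and needs to be written back. A minimal sketch of that comparison, outside any botbuilder class:

from jsonpickle.pickler import Pickler


def compute_hash(obj) -> str:
    # The string form of the flattened structure acts as a fingerprint
    # of the object's current contents.
    return str(Pickler().flatten(obj))


state = {'count': 1, 'user': 'alice'}
before = compute_hash(state)

state['count'] += 1
after = compute_hash(state)

print(before == after)  # False: the state changed, so it should be persisted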
github thundra-io / thundra-lambda-agent-python / thundra / plugins / trace / traceable.py
_trace_local_variables_ = False
    _trace_lines_with_source = False
    _traceable = __get_traceable_from_back_frame(frame)
    _scope = __get_scope_from_back_frame(frame)

    if not _scope or not _scope.span:
        return

    if _traceable:
        _trace_local_variables_ = _traceable._trace_local_variables
        _trace_lines_with_source = _traceable._trace_lines_with_source

    _local_vars = []
    if _trace_local_variables_:
        pickler = jsonpickle.pickler.Pickler(max_depth=3)
        for l in frame.f_locals:
            _local_var_value = frame.f_locals[l]
            _local_var_type = type(_local_var_value).__name__
            try:
                _local_var_value = pickler.flatten(_local_var_value, reset=True)
            except Exception as e:
                _local_var_value = ''
            _local_var = {
                'name': l,
                'value': _local_var_value,
                'type': _local_var_type
            }
            _local_vars.append(_local_var)

    method_line = {
        'line': _line_no,