from collections import OrderedDict
from multiprocessing import Process
from typing import Any, Callable, Iterable, Mapping, Optional, Tuple, Type

# NOTE: these snippets come from several modules; helpers such as load,
# multi_task, get_args, get_origin, get_class_name, get_type, ClsFunction,
# default_namedtuple_deserializer, tuple_with_ellipsis, JsonsError,
# SerializationError, MappingType (assumed to alias typing.Mapping),
# _delegates and the NDArray parameter classes (_Size, _Type, ...) are assumed
# to be provided by the surrounding jsons / nptyping modules.


def default_mapping_deserializer(obj: dict, cls: type, **kwargs) -> Mapping:
    """
    Deserialize a (JSON) dict into a mapping by deserializing all items of
    that dict.
    :param obj: the dict that needs deserializing.
    :param cls: the type, optionally with a generic (e.g. Mapping[str, int]).
    :param kwargs: any keyword arguments.
    :return: a deserialized mapping instance.
    """
    cls_ = Mapping
    cls_args = get_args(cls)
    if cls_args:
        cls_ = MappingType[cls_args]
    dict_ = default_dict_deserializer(obj, cls_, **kwargs)
    result = dict_
    # Strip any generics from cls to allow for an instance check.
    if not isinstance(result, get_origin(cls)):
        result = cls(dict_)
    return result
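
# Usage sketch (assumption: this deserializer ships with the jsons package and
# is normally reached through jsons.load rather than called directly).
import jsons

mapping = jsons.load({'a': 1, 'b': 2}, Mapping[str, int])
print(dict(mapping))  # {'a': 1, 'b': 2}
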
def default_list_deserializer(  # function name and first two parameters assumed
        obj: list,
        cls: type = None,
        *,
        tasks: int = 1,
        task_type: type = Process,
        **kwargs) -> list:
    """
    Deserialize a list by deserializing all items of that list.
    :param obj: the list that needs deserializing.
    :param cls: the type, optionally with a generic (e.g. List[str]).
    :param tasks: the allowed number of tasks (threads or processes).
    :param task_type: the type that is used for multitasking.
    :param kwargs: any keyword arguments.
    :return: a deserialized list instance.
    """
    cls_ = None
    kwargs_ = {**kwargs}
    cls_args = get_args(cls)
    if cls_args:
        cls_ = cls_args[0]
        # Mark the cls as 'inferred' so that later it is known where cls came
        # from and the precedence of classes can be determined.
        kwargs_['_inferred_cls'] = True
    if tasks == 1:
        result = [load(elem, cls=cls_, tasks=1, **kwargs_) for elem in obj]
    elif tasks > 1:
        result = multi_task(load, obj, tasks, task_type, cls_, **kwargs_)
    else:
        raise JsonsError('Invalid number of tasks: {}'.format(tasks))
    return result
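
# Usage sketch for the list deserializer above (assumption: part of the jsons
# package; classes with annotated __init__ parameters are loadable elements).
import jsons
from typing import List


class Point:
    def __init__(self, x: int, y: int):
        self.x = x
        self.y = y


points = jsons.load([{'x': 1, 'y': 2}, {'x': 3, 'y': 4}], List[Point])
print(points[0].x, points[1].y)  # 1 4
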
def default_dict_deserializer(  # name assumed; it matches the call in default_mapping_deserializer above
        obj: dict,
        cls: type,
        *,
        key_transformer: Optional[Callable[[str], str]] = None,
        **kwargs) -> dict:
    """
    Deserialize a dict by deserializing all items of that dict.
    :param obj: the dict that needs deserializing.
    :param cls: the type, optionally with a generic (e.g. Dict[str, int]).
    :param key_transformer: a function that transforms the keys to a different
    style (e.g. PascalCase).
    :param kwargs: any keyword arguments.
    :return: a deserialized dict instance.
    """
    key_tfr = key_transformer or (lambda key: key)
    cls_args = get_args(cls)
    kwargs_ = {**kwargs, 'key_transformer': key_transformer}
    if len(cls_args) == 2:
        cls_k, cls_v = cls_args
        kwargs_k = {**kwargs_, 'cls': cls_k}
        kwargs_v = {**kwargs_, 'cls': cls_v}
        res = {load(key_tfr(k), **kwargs_k): load(obj[k], **kwargs_v)
               for k in obj}
    else:
        res = {key_tfr(key): load(obj[key], **kwargs_)
               for key in obj}
    return res
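
# Usage sketch for the dict deserializer above (assumption: jsons exposes the
# KEY_TRANSFORMER_SNAKECASE constant used here).
import jsons

loaded = jsons.load({'FirstName': 'Ada', 'LastName': 'Lovelace'}, dict,
                    key_transformer=jsons.KEY_TRANSFORMER_SNAKECASE)
print(loaded)  # {'first_name': 'Ada', 'last_name': 'Lovelace'}
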
def default_tuple_deserializer(obj: list,
                               cls: type = None,
                               **kwargs) -> object:
    """
    Deserialize a (JSON) list into a tuple by deserializing all items of that
    list.
    :param obj: the (JSON) list that needs deserializing.
    :param cls: the type, optionally with a generic (e.g. Tuple[str, int]).
    :param kwargs: any keyword arguments.
    :return: a deserialized tuple instance.
    """
    if hasattr(cls, '_fields'):
        return default_namedtuple_deserializer(obj, cls, **kwargs)
    cls_args = get_args(cls)
    if cls_args:
        tuple_types = getattr(cls, '__tuple_params__', cls_args)
        if tuple_with_ellipsis(cls):
            # E.g. Tuple[int, ...]: every element gets the same type.
            tuple_types = [tuple_types[0]] * len(obj)
        list_ = [load(value, tuple_types[i], **kwargs)
                 for i, value in enumerate(obj)]
    else:
        list_ = [load(value, **kwargs) for value in obj]
    return tuple(list_)
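
# Usage sketch for the tuple deserializer above (assumption: part of jsons;
# heterogeneous Tuple generics map one-to-one onto the list elements).
import jsons
from typing import Tuple

pair = jsons.load(['1984', 42], Tuple[str, int])
print(pair)  # ('1984', 42)
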
def _get_subclasses(obj: Iterable, cls: type = None) -> Tuple[type, ...]:
    subclasses = (None,) * len(obj)
    if cls:
        args = get_args(cls)
        if len(args) == 1:
            # E.g. List[int]
            subclasses = args * len(obj)
        elif len(args) > 1:
            # E.g. Tuple[int, str, str]
            subclasses = args
    if len(subclasses) != len(obj):
        msg = ('Not enough generic types ({}) in {}, expected {} to match '
               'the iterable of length {}'
               .format(len(subclasses), cls, len(obj), len(obj)))
        raise SerializationError(msg)
    return subclasses
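
# Illustration of the contract of _get_subclasses above: a single generic is
# repeated for every element, multiple generics map one-to-one, and a missing
# cls yields a tuple of None placeholders.
from typing import List, Tuple

assert _get_subclasses([1, 2, 3], List[int]) == (int, int, int)
assert _get_subclasses([1, 'a', 'b'], Tuple[int, str, str]) == (int, str, str)
assert _get_subclasses([1, 2], None) == (None, None)
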
def _store_cls_info(result: object, original_obj: dict, kwargs):
    if kwargs.get('_store_cls', None) and isinstance(result, dict):
        cls = get_type(original_obj)
        if cls.__module__ == 'typing':
            cls_name = repr(cls)
        else:
            cls_name = get_class_name(cls, fully_qualified=True)
        result['-cls'] = cls_name


# Variant of _store_cls_info (from another module) that records the type of a
# single attribute of the original object.
def _store_cls_info(result: object, attr: str, original_obj: dict, **kwargs):
    if isinstance(result, dict) and kwargs.get('_store_cls'):
        cls = get_type(original_obj[attr])
        if cls.__module__ == 'typing':
            cls_name = repr(cls)
        else:
            cls_name = get_class_name(cls, fully_qualified=True,
                                      fork_inst=kwargs['fork_inst'])
        result['-cls'] = cls_name


def determine_cls(obj: Iterable, cls: Optional[type]) -> Optional[type]:
    cls_ = cls
    if not cls and hasattr(obj, '__getitem__') and len(obj) > 0:
        obj_with_only_one_elem = obj.__getitem__(slice(0, 1))
        cls_ = get_type(obj_with_only_one_elem)
    return cls_


def get_type(obj: Any) -> Type['NPType']:
    """
    Return the nptyping type of the given obj. The given obj can be a numpy
    ndarray, a dtype or a Python type. If no corresponding nptyping type
    can be determined, a TypeError is raised.
    :param obj: the object for which an nptyping type is to be returned.
    :return: a subclass of NPType.
    """
    return ClsFunction(_delegates)(obj)
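
# Usage sketch for get_type above (assumption: this is nptyping's get_type;
# the exact reprs of the returned types vary between nptyping versions).
import numpy as np

print(get_type(np.zeros((3, 4))))  # e.g. NDArray[(3, 4), Float[64]]
print(get_type(int))               # e.g. Int[64]
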

def _after_subscription(cls, item: Any) -> None:
    # Dispatch on the form of the subscription parameter (a single size, a
    # type, sizes and a type, etc.) via ClsFunction.
    method = ClsFunction(OrderedDict([
        (_Size, cls._only_size),
        (_Type, cls._only_type),
        (_NSizes, lambda _: ...),
        (_SizeAndType, cls._size_and_type),
        (_Sizes, cls._only_sizes),
        (_SizesAndType, cls._sizes_and_type),
        (_NSizesAndType, cls._sizes_and_type),
        (_Default, lambda _: ...),
    ]))
    if not method.understands(item):
        raise TypeError('Invalid parameter for NDArray: "{}"'.format(item))
    return method(item)
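
# Usage sketch for _after_subscription above (assumption: nptyping 1.x, where
# NDArray accepts a shape tuple and/or an element type on subscription).
import numpy as np
from nptyping import NDArray

Array3x3Int = NDArray[(3, 3), int]
print(isinstance(np.zeros((3, 3), dtype=int), Array3x3Int))  # True
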