Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# NOTE(review): fragment of a larger method — the original indentation was
# lost in this paste, so the for/if nesting is described in comments only.
# Tail of a per-element scan: a mismatched element means the list's item
# type cannot be inferred, so fall back to the generic Converter.
item_cls = Converter
break
else:
# for/else: the list was empty (loop never ran), so no item type either.
log.warn(f'{name!r} list type cannot be inferred')
item_cls = Converter
log.debug(f'Inferring {name!r} type: {cls} of {item_cls}')
self.attrs[name] = map_type(cls, name=name, item_cls=item_cls)
elif issubclass(cls, dict):
# Tag the concrete dict subclass so map_type handles it as a generic dict.
cls.__origin__ = dict
log.debug(f'Inferring {name!r} type: {cls}')
# Dict values get the generic Converter as their item type.
self.attrs[name] = map_type(cls, name=name, item_cls=Converter)
else:
# Non-container attribute: map the type directly, no item type needed.
log.debug(f'Inferring {name!r} type: {cls}')
self.attrs[name] = map_type(cls, name=name)
# Second phase: run every registered converter over the loaded data.
for name, converter in self.attrs.items():
log.debug(f"Converting '{name}' data with {converter}")
if getattr(converter, 'DATACLASS', None):
# Nested dataclass value: use the dataclass-aware setter.
self._set_dataclass_value(data, name, converter)
else:
self._set_attribute_value(data, name, converter, _first)
# NOTE(review): presumably re-applies instance hooks after a load and
# clears the dirty flag — confirm against the hooks module.
hooks.apply(self._instance, self)
self.modified = False
# NOTE(review): fragment of a larger method (indentation lost in this paste);
# `cls`, `name`, `value`, and `data` are bound before this view begins.
# Infer a container mapping for a list-typed attribute.
if issubclass(cls, list):
# Tag the concrete list subclass so map_type handles it as a generic list.
cls.__origin__ = list
if value:
# Infer the item type from the first element, then verify the rest.
item_cls = type(value[0])
for item in value:
if not isinstance(item, item_cls):
# Mixed element types: fall back to the generic Converter.
# NOTE(review): stdlib loggers deprecate `warn` in favor of
# `warning` — confirm whether `log` is the stdlib logger here.
log.warn(f'{name!r} list type cannot be inferred')
item_cls = Converter
break
else:
# Empty list: no elements to infer a type from.
log.warn(f'{name!r} list type cannot be inferred')
item_cls = Converter
log.debug(f'Inferring {name!r} type: {cls} of {item_cls}')
self.attrs[name] = map_type(cls, name=name, item_cls=item_cls)
elif issubclass(cls, dict):
# Tag the concrete dict subclass so map_type handles it as a generic dict.
cls.__origin__ = dict
log.debug(f'Inferring {name!r} type: {cls}')
self.attrs[name] = map_type(cls, name=name, item_cls=Converter)
else:
# Non-container attribute: map the type directly.
log.debug(f'Inferring {name!r} type: {cls}')
self.attrs[name] = map_type(cls, name=name)
# Second phase: run every registered converter over the loaded data.
for name, converter in self.attrs.items():
log.debug(f"Converting '{name}' data with {converter}")
if getattr(converter, 'DATACLASS', None):
# Nested dataclass value: use the dataclass-aware setter.
self._set_dataclass_value(data, name, converter)
else:
self._set_attribute_value(data, name, converter, _first)
# TODO: Move this parsing into config.py
# NOTE(review): fragment — `m`, `obj`, and `root` are bound before this view.
# Read per-model configuration off the Meta object, falling back to the
# class-level defaults on `Meta` for every option except pattern/attrs.
pattern = getattr(m, 'datafile_pattern', None)
attrs = getattr(m, 'datafile_attrs', None)
manual = getattr(m, 'datafile_manual', Meta.datafile_manual)
defaults = getattr(m, 'datafile_defaults', Meta.datafile_defaults)
auto_load = getattr(m, 'datafile_auto_load', Meta.datafile_auto_load)
auto_save = getattr(m, 'datafile_auto_save', Meta.datafile_auto_save)
auto_attr = getattr(m, 'datafile_auto_attr', Meta.datafile_auto_attr)
# No explicit attrs: derive them from the dataclass fields.
if attrs is None and dataclasses.is_dataclass(obj):
attrs = {}
log.debug(f'Mapping attributes for {obj.__class__} object')
for field in dataclasses.fields(obj):
self_name = f'self.{field.name}'
# Skip fields interpolated into the filename pattern — they are
# part of the path, not of the serialized attributes.
if pattern is None or self_name not in pattern:
attrs[field.name] = map_type(field.type, name=field.name)
# Assemble the mapper with the resolved configuration.
return Mapper(
obj,
attrs=attrs,
pattern=pattern,
manual=manual,
defaults=defaults,
auto_load=auto_load,
auto_save=auto_save,
auto_attr=auto_attr,
root=root,
)
# NOTE(review): fragment — the enclosing `def` (taking `obj` and `root`) is
# outside this view. This copy falls back to ModelMeta defaults where the
# near-duplicate above uses Meta — confirm which is current.
# The optional inner `Meta` class carries per-model overrides.
m = getattr(obj, 'Meta', None)
pattern = getattr(m, 'datafile_pattern', None)
attrs = getattr(m, 'datafile_attrs', None)
manual = getattr(m, 'datafile_manual', ModelMeta.datafile_manual)
defaults = getattr(m, 'datafile_defaults', ModelMeta.datafile_defaults)
auto_load = getattr(m, 'datafile_auto_load', ModelMeta.datafile_auto_load)
auto_save = getattr(m, 'datafile_auto_save', ModelMeta.datafile_auto_save)
auto_attr = getattr(m, 'datafile_auto_attr', ModelMeta.datafile_auto_attr)
# No explicit attrs: derive them from the dataclass fields.
if attrs is None and dataclasses.is_dataclass(obj):
attrs = {}
log.debug(f'Mapping attributes for {obj.__class__} object')
for field in dataclasses.fields(obj):
self_name = f'self.{field.name}'
# Skip fields interpolated into the filename pattern — they are
# part of the path, not of the serialized attributes.
if pattern is None or self_name not in pattern:
attrs[field.name] = map_type(field.type, name=field.name)
# Assemble the mapper with the resolved configuration.
return Mapper(
obj,
attrs=attrs,
pattern=pattern,
manual=manual,
defaults=defaults,
auto_load=auto_load,
auto_save=auto_save,
auto_attr=auto_attr,
root=root,
)