def find_examples_in_schema(self):
    """Returns generator for all examples in schema at given path"""
    with open(str(self.fspath), 'rb') as fd:
        schema_tree = yaml.safe_load(fd)

    for node in treeutil.iter_tree(schema_tree):
        if (isinstance(node, dict) and
                'examples' in node and
                isinstance(node['examples'], list)):
            for desc, example in node['examples']:
                yield example
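For context, treeutil.iter_tree walks every node of a nested dict/list structure, which is what lets the generator above find each 'examples' entry. A minimal, self-contained sketch of the same traversal (the _iter_tree helper and the sample schema below are illustrative, not asdf's actual implementation):

def _iter_tree(node):
    # Depth-first walk yielding every node in a nested dict/list tree.
    yield node
    if isinstance(node, dict):
        for value in node.values():
            yield from _iter_tree(value)
    elif isinstance(node, (list, tuple)):
        for item in node:
            yield from _iter_tree(item)

# Hypothetical schema fragment whose 'examples' holds [description, example] pairs.
schema_tree = {
    "properties": {
        "name": {
            "type": "string",
            "examples": [["a simple name", "Cassini"]],
        },
    },
}

for node in _iter_tree(schema_tree):
    if isinstance(node, dict) and isinstance(node.get("examples"), list):
        for desc, example in node["examples"]:
            print(desc, "->", example)   # a simple name -> Cassini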
else:
    schema, url = loader(url)

# Resolve local references
if resolve_local_refs:
    def resolve_local(node, json_id):
        if isinstance(node, dict) and '$ref' in node:
            ref_url = resolver(node['$ref'])
            if ref_url.startswith('#'):
                parts = urlparse.urlparse(ref_url)
                subschema_fragment = reference.resolve_fragment(
                    schema, parts.fragment)
                return subschema_fragment
        return node

    schema = treeutil.walk_and_modify(schema, resolve_local)

if resolve_references:
    def resolve_refs(node, json_id):
        if json_id is None:
            json_id = url
        if isinstance(node, dict) and '$ref' in node:
            suburl = generic_io.resolve_uri(json_id, resolver(node['$ref']))
            parts = urlparse.urlparse(suburl)
            fragment = parts.fragment
            if len(fragment):
                suburl_path = suburl[:-(len(fragment) + 1)]
            else:
                suburl_path = suburl
            suburl_path = resolver(suburl_path)
            if suburl_path == url:
                subschema = schema
            else:
                subschema = load_schema(suburl_path, resolver, True)

            subschema_fragment = reference.resolve_fragment(
                subschema, fragment)
            return subschema_fragment
        return node

    schema = treeutil.walk_and_modify(schema, resolve_refs)

return schema
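The path/fragment split above can be checked with a concrete reference URI; stripping len(fragment) + 1 characters removes the fragment plus the '#' separator (the URI and fragment here are made up for illustration):

from urllib import parse as urlparse

suburl = "http://stsci.edu/schemas/asdf/core/ndarray-1.0.0#/definitions/datatype"
fragment = urlparse.urlparse(suburl).fragment      # "/definitions/datatype"
suburl_path = suburl[:-(len(fragment) + 1)]        # drop "#/definitions/datatype"
print(suburl_path)   # http://stsci.edu/schemas/asdf/core/ndarray-1.0.0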
def resolve_references(tree, ctx, do_not_fill_defaults=False):
    """
    Resolve all of the references in the tree, by loading the external
    data and inserting it directly into the tree.
    """
    def do_resolve(tree):
        if isinstance(tree, Reference):
            return tree(do_not_fill_defaults=do_not_fill_defaults)
        return tree

    tree = find_references(tree, ctx)
    return treeutil.walk_and_modify(tree, do_resolve)
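treeutil.walk_and_modify rebuilds the tree by feeding every node through a callback and keeping whatever the callback returns, which is how do_resolve swaps Reference objects for their resolved contents. A toy stand-in showing only that substitution idea (asdf's real implementation also handles cycles, callback signatures, and custom container types):

def _walk_and_modify(node, callback):
    # Rebuild children first, then let the callback replace the node itself.
    if isinstance(node, dict):
        node = {key: _walk_and_modify(value, callback) for key, value in node.items()}
    elif isinstance(node, list):
        node = [_walk_and_modify(item, callback) for item in node]
    return callback(node)

# Pretend strings starting with "ref:" are unresolved references.
tree = {"data": "ref:external.asdf", "meta": {"note": "left alone"}}
resolved = _walk_and_modify(
    tree, lambda n: "resolved!" if isinstance(n, str) and n.startswith("ref:") else n)
print(resolved)   # {'data': 'resolved!', 'meta': {'note': 'left alone'}}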
def validate_large_literals(instance, reading=False):
    """
    Validate that the tree has no large numeric literals.
    """
    # We can count on 52 bits of precision
    for instance in treeutil.iter_tree(instance):
        if not isinstance(instance, Integral):
            continue

        if instance <= ((1 << 51) - 1) and instance >= -((1 << 51) - 2):
            continue

        if not reading:
            raise ValidationError(
                "Integer value {0} is too large to safely represent as a "
                "literal in ASDF".format(instance))

        warnings.warn(
            "Invalid integer literal value {0} detected while reading file. "
            "The value has been read safely, but the file should be "
            "fixed.".format(instance))
# If we don't have any blocks that are being reused, just
# write out in a serial fashion.
self._serial_write(fd, pad_blocks, include_block_index)
fd.truncate()
return

# Estimate how big the tree will be on disk by writing the
# YAML out in memory. Since the block indices aren't yet
# known, we have to count the number of block references and
# add enough space to accommodate the largest block number
# possible there.
tree_serialized = io.BytesIO()
self._write_tree(self._tree, tree_serialized, pad_blocks=False)
array_ref_count = [0]
from .tags.core.ndarray import NDArrayType

for node in treeutil.iter_tree(self._tree):
    if (isinstance(node, (np.ndarray, NDArrayType)) and
            self.blocks[node].array_storage == 'internal'):
        array_ref_count[0] += 1

serialized_tree_size = (
    tree_serialized.tell() +
    constants.MAX_BLOCKS_DIGITS * array_ref_count[0])

if not block.calculate_updated_layout(
        self.blocks, serialized_tree_size,
        pad_blocks, fd.block_size):
    # If we don't have any blocks that are being reused, just
    # write out in a serial fashion.
    self._serial_write(fd, pad_blocks, include_block_index)
    fd.truncate()
    return
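The estimate above is just the length of the in-memory YAML plus worst-case room for every internal-array block reference. As a rough illustration with made-up numbers (the value of MAX_BLOCKS_DIGITS here is only an assumption):

MAX_BLOCKS_DIGITS = 10        # assumed width reserved per block-number reference
tree_yaml_bytes = 4096        # hypothetical size of the serialized tree
internal_arrays = 3           # arrays whose storage is 'internal'

serialized_tree_size = tree_yaml_bytes + MAX_BLOCKS_DIGITS * internal_arrays
print(serialized_tree_size)   # 4126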
"""
Run a "hook" for each custom type found in the tree.
Parameters
----------
hookname : str
The name of the hook. If a `AsdfType` is found with a method
with this name, it will be called for every instance of the
corresponding custom type in the tree.
"""
type_index = self.type_index
if not type_index.has_hook(hookname):
return
for node in treeutil.iter_tree(self._tree):
hook = type_index.get_hook_for_type(hookname, type(node),
self.version_string)
if hook is not None:
hook(node, self)
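The hook mechanism reduces to: if the handler class registered for a node's type defines a method named hookname, call it on every matching node. A stripped-down, hypothetical version of that dispatch:

class FakeNDArrayType:
    @classmethod
    def reserve_blocks(cls, node, ctx):
        print("reserve_blocks hook ran for", node)

def run_hook_sketch(nodes, hookname, type_index):
    # type_index maps Python types to handler classes (illustrative only).
    for node in nodes:
        handler = type_index.get(type(node))
        hook = getattr(handler, hookname, None) if handler is not None else None
        if hook is not None:
            hook(node, None)

run_hook_sketch([42, "no hook for str"], "reserve_blocks", {int: FakeNDArrayType})
# reserve_blocks hook ran for 42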
    # If a tag class does not explicitly list compatible versions, then all
    # versions of the corresponding schema are assumed to be compatible.
    # Therefore we need to check to make sure whether the conversion is
    # actually successful, and just return a raw Python data type if it is
    # not.
    try:
        return tag_type.from_tree_tagged(node, ctx)
    except TypeError as err:
        warnings.warn("Failed to convert {} to custom type (detail: {}). "
                      "Using raw Python data structure instead".format(real_tag, err),
                      AsdfConversionWarning)
        return node

return treeutil.walk_and_modify(tree, walker)
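Falling back to the raw tagged node means a file with an incompatible tag still loads with a warning instead of failing outright. Callers who want a hard failure can promote the warning to an exception with the standard warnings filter (the warning class below is a stand-in for asdf's AsdfConversionWarning, defined here only so the sketch runs on its own):

import warnings

class AsdfConversionWarning(UserWarning):
    # Stand-in for asdf's warning category, just for this sketch.
    pass

warnings.simplefilter("error", AsdfConversionWarning)

try:
    warnings.warn("Failed to convert tag to custom type", AsdfConversionWarning)
except AsdfConversionWarning as err:
    print("caught:", err)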
def _find_used_blocks(self, tree, ctx):
    reserved_blocks = set()

    for node in treeutil.iter_tree(tree):
        hook = ctx.type_index.get_hook_for_type(
            'reserve_blocks', type(node), ctx.version_string)
        if hook is not None:
            for block in hook(node, ctx):
                reserved_blocks.add(block)

    for block in list(self.blocks):
        if (getattr(block, '_used', 0) == 0 and
                block not in reserved_blocks):
            self.remove(block)
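The cleanup step is effectively a small mark-and-sweep: any block claimed by a reserve_blocks hook survives, and everything else with a zero use count is dropped. Schematically (the dict-based blocks are only a stand-in for asdf's block objects):

blocks = [{"id": 1, "_used": 1}, {"id": 2, "_used": 0}, {"id": 3, "_used": 0}]
reserved_ids = {2}   # pretend a reserve_blocks hook claimed block 2

kept = [b for b in blocks if b["_used"] != 0 or b["id"] in reserved_ids]
print([b["id"] for b in kept])   # [1, 2]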