Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def extract_file(input_file, output_file):
    """Extract the ASDF tree from an ASDF-in-FITS file into a pure ASDF file.

    Parameters
    ----------
    input_file : str or file-like object
        The ASDF-in-FITS input file.
    output_file : str or file-like object
        Destination for the pure ASDF output.

    Raises
    ------
    RuntimeError
        If the input file is not ASDF-in-FITS, or if an ``IOError`` /
        ``ValueError`` occurs while reading or writing.
    """
    try:
        with asdf.open(input_file) as ih:
            if not isinstance(ih, AsdfInFits):
                msg = "Given input file '{}' is not ASDF-in-FITS"
                raise RuntimeError(msg.format(input_file))
            # Re-wrap only the tree so the FITS container is dropped.
            with asdf.AsdfFile(ih.tree) as oh:
                oh.write_to(output_file)
    except (IOError, ValueError) as error:
        # Chain the original exception so the root cause stays in the traceback.
        raise RuntimeError(str(error)) from error
# NOTE(review): orphaned fragment — the enclosing ``def`` line and the opening
# of this docstring are missing from this chunk; the text below is the tail of
# a NumPy-style docstring for a function taking (input, output,
# resolve_references, compress). Recover the original header before editing.
Parameters
----------
input : str or file-like object
The input file.
output : str of file-like object
The output file.
resolve_references : bool, optional
If `True` resolve all external references before saving.
compress : str, optional
Compression to use.
"""
# Re-serialize ``input`` as a new ASDF file: optionally resolve external
# references first, then write with all arrays stored internally, applying
# the requested block compression.
with asdf.open(input) as ff:
ff2 = AsdfFile(ff)
if resolve_references:
ff2.resolve_references()
ff2.write_to(
output,
all_array_storage='internal',
all_array_compression=compress)
# NOTE(review): this function is truncated in this chunk — the ``try:`` on the
# last line has no body here, and the loop never sets ``locked = True`` in the
# visible code. Do not restructure until the full body is recovered.
def publish_model(args):
"""
Pushes the model to Google Cloud Storage and updates the index file.
:param args: :class:`argparse.Namespace` with "model", "gcs" and "force".
:return: None if successful, 1 otherwise.
"""
log = logging.getLogger("publish")
log.info("Reading %s...", os.path.abspath(args.model))
# NOTE(review): asdf.open() result is never closed — presumably leaks the
# file handle; confirm and consider a ``with`` block in the full source.
tree = asdf.open(args.model).tree
meta = tree["meta"]
log.info("Locking the bucket...")
# Random token identifying this agent's lock attempt.
transaction = uuid.uuid4().hex.encode()
if args.credentials:
client = Client.from_service_account_json(args.credentials)
else:
# Fall back to ambient (application-default) credentials.
client = Client()
bucket = client.get_bucket(args.gcs)
# Lock is advisory: a blob named "index.lock" marks the bucket as busy.
sentinel = bucket.blob("index.lock")
locked = False
while not locked:
# Busy-wait until no other agent holds the lock.
while sentinel.exists():
log.warning("Failed to acquire the lock, waiting...")
time.sleep(1)
# At this step, several agents may think the lockfile does not exist
try:
# NOTE(review): orphaned fragment — the enclosing ``def`` line and the opening
# of this docstring are missing from this chunk; the text below is the tail of
# a NumPy-style docstring for a function taking (input, output,
# resolve_references).
Parameters
----------
input : str or file-like object
The input file.
output : str of file-like object
The output file.
resolve_references : bool, optional
If `True` resolve all external references before saving.
"""
# Default output path: same basename as the input with a .yaml extension.
if output is None:
base, ext = os.path.splitext(input)
output = base + '.yaml'
# Write the tree with all arrays inlined, producing a pure-YAML ASDF file.
with asdf.open(input) as ff:
ff2 = AsdfFile(ff)
if resolve_references:
ff2.resolve_references()
ff2.write_to(output, all_array_storage='inline')
def load_file(self, filepath):
    """Read the ASDF file at *filepath* and show it in the image viewer.

    Also updates the window title to the loaded path.
    """
    img = AstroImage.AstroImage(logger=self.logger)
    with asdf.open(filepath) as af:
        img.load_asdf(af)
    self.fitsimage.set_image(img)
    self.setWindowTitle(filepath)
# NOTE(review): orphaned fragment of a docutils/Sphinx directive method —
# cut at both ends (the ``try:`` has no except/finally in view, and the
# per-block rendering stops mid-way). References ``filename``,
# ``version_string`` and ``self.arguments`` defined outside this chunk.
parts = []
try:
# Render the YAML portion of the ASDF file as a literal code block.
ff = AsdfFile()
code = AsdfFile._open_impl(ff, filename, _get_yaml_content=True)
code = '{0} {1}\n'.format(ASDF_MAGIC, version_string) + code.strip().decode('utf-8')
literal = nodes.literal_block(code, code)
literal['language'] = 'yaml'
set_source_info(self, literal)
parts.append(literal)
kwargs = dict()
# Use the ignore_unrecognized_tag parameter as a proxy for both options
kwargs['ignore_unrecognized_tag'] = 'ignore_unrecognized_tag' in self.arguments
kwargs['ignore_missing_extensions'] = 'ignore_unrecognized_tag' in self.arguments
# Summarize each internal binary block of the ASDF file.
with asdf.open(filename, **kwargs) as ff:
for i, block in enumerate(ff.blocks.internal_blocks):
# Hex-encode the data and truncate the preview to 40 characters.
data = codecs.encode(block.data.tostring(), 'hex')
if len(data) > 40:
data = data[:40] + '...'.encode()
allocated = block._allocated
size = block._size
data_size = block._data_size
flags = block._flags
# Streamed blocks have no fixed size; report zeros.
if flags & BLOCK_FLAG_STREAMED:
allocated = size = data_size = 0
lines = []
lines.append('BLOCK {0}:'.format(i))
human_flags = []
# NOTE(review): orphaned fragment of a model-loading method — the opening
# ``if``/``try`` matching the ``else:`` and ``finally:`` below are missing
# from this chunk, as are ``config``, ``is_uuid``, ``file_name`` and
# ``cache_dir``. Resolves ``source`` (path/URL/UUID) to a file, then loads it.
source = config[self.NAME][source]
else:
if not is_uuid:
raise ValueError("File path, URL or UUID is needed.")
# Look the UUID up across every model family in the config index.
for models in config.values():
if source in models:
source = models[source]
break
else:
raise FileNotFoundError("Model %s not found." % source)
source = source["url"]
# Download remote models to a local file before opening.
if source.startswith("http://") or source.startswith("https://"):
self._fetch(source, file_name)
source = file_name
self._log.info("Reading %s...", source)
with asdf.open(source) as model:
tree = model.tree
self._meta = tree["meta"]
# Reject files whose declared model type does not match this class.
if self.NAME != self._meta["model"] and self.NAME is not None:
raise ValueError(
"The supplied model is of the wrong type: needed "
"%s, got %s." % (self.NAME, self._meta["model"]))
self._load(tree)
finally:
# Generic (untyped) loads use a temporary cache dir; clean it up.
if self.NAME is None:
shutil.rmtree(cache_dir)
"""
Explode a given ASDF file so each data block is in a separate
file.
Parameters
----------
input : str or file-like object
The input file.
output : str of file-like object
The output file.
"""
if output is None:
base, ext = os.path.splitext(input)
output = base + '_exploded' + '.asdf'
with asdf.open(input) as ff:
ff.write_to(output, all_array_storage='external')