Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
a list of possible modifiers.
Returns (list or DatasetID): Matching key(s)
Raises: KeyError if no matching results or if more than one result is
found when `num_results` is `1`.
"""
if isinstance(key, numbers.Number):
# we want this ID to act as a query so we set modifiers to None
# meaning "we don't care how many modifiers it has".
key = DatasetID(wavelength=key, modifiers=None)
elif isinstance(key, (str, six.text_type)):
# ID should act as a query (see wl comment above)
key = DatasetID(name=key, modifiers=None)
elif not isinstance(key, DatasetID):
raise ValueError("Expected 'DatasetID', str, or number dict key, "
"not {}".format(str(type(key))))
res = filter_keys_by_dataset_id(key, key_container)
# further filter by other parameters
if resolution is not None:
if not isinstance(resolution, (list, tuple)):
resolution = (resolution, )
res = [k for k in res
if k.resolution is not None and k.resolution in resolution]
if polarization is not None:
if not isinstance(polarization, (list, tuple)):
polarization = (polarization, )
res = [k for k in res
did (DatasetID): Query parameters to match in the `key_container`.
key_container (iterable): Set, list, tuple, or dict of `DatasetID`
keys.
Returns (list): List of keys matching the provided parameters in no
specific order.
"""
keys = iter(key_container)
for key in DATASET_KEYS:
if getattr(did, key) is not None:
if key == "wavelength":
keys = [k for k in keys
if (getattr(k, key) is not None and
DatasetID.wavelength_match(getattr(k, key),
getattr(did, key)))]
else:
keys = [k for k in keys
if getattr(k, key) is not None and getattr(k, key)
== getattr(did, key)]
return keys
id_kwargs.append((tuple(val), ))
elif key == "modifiers" and val is None:
# empty modifiers means no modifiers applied
id_kwargs.append((tuple(), ))
elif isinstance(val, (list, tuple, set)):
# this key has multiple choices
# (ex. 250 meter, 500 meter, 1000 meter resolutions)
id_kwargs.append(val)
elif isinstance(val, dict):
id_kwargs.append(val.keys())
else:
# this key only has one choice so make it a one
# item iterable
id_kwargs.append((val, ))
for id_params in itertools.product(*id_kwargs):
dsid = DatasetID(*id_params)
ids.append(dsid)
# create dataset infos specifically for this permutation
ds_info = dataset.copy()
for key in DATASET_KEYS:
if isinstance(ds_info.get(key), dict):
ds_info.update(ds_info[key][getattr(dsid, key)])
# this is important for wavelength which was converted
# to a tuple
ds_info[key] = getattr(dsid, key)
self.ids[dsid] = ds_info
return ids
def _get_coordinates_for_dataset_key(self, dsid):
    """Return the coordinate dataset keys matched for *dsid*.

    Args:
        dsid (DatasetID): Key whose metadata (looked up in ``self.ids``)
            lists the coordinate datasets to resolve.

    Returns:
        list: One key per entry in the dataset's ``coordinates`` metadata,
        each resolved through ``self.get_dataset_key``, in declaration
        order.
    """
    ds_info = self.ids[dsid]
    cids = []
    for cinfo in ds_info.get('coordinates', []):
        if not isinstance(cinfo, dict):
            cinfo = {'name': cinfo}
        else:
            # Copy so the query parameters added below don't mutate the
            # coordinate info dict stored inside ``self.ids`` (the
            # original code modified the shared config in place).
            cinfo = cinfo.copy()
        # Match the coordinate at the same resolution (and polarization,
        # when present) as the dataset that refers to it.
        cinfo['resolution'] = ds_info['resolution']
        if 'polarization' in ds_info:
            cinfo['polarization'] = ds_info['polarization']
        cid = DatasetID(**cinfo)
        cids.append(self.get_dataset_key(cid))
    return cids
"""
for file_handlers in self.file_handlers.values():
fh = file_handlers[0]
# update resolution in the dataset IDs for this files resolution
try:
res = fh.resolution
except NotImplementedError:
continue
for ds_id, ds_info in list(self.ids.items()):
if fh.filetype_info['file_type'] != ds_info['file_type']:
continue
if ds_id.resolution is not None:
continue
ds_info['resolution'] = res
new_id = DatasetID.from_dict(ds_info)
self.ids[new_id] = ds_info
del self.ids[ds_id]
def available_datasets(self):
"""Automatically determine datasets provided by this file.

Yields:
    (DatasetID, dict): A dataset ID for each ``SDS`` (array) variable
    found in ``self.file_content``, paired with its info dict (file
    type, lon/lat coordinate names, and the sensor's nadir resolution).
"""
# NOTE(review): indentation is mangled in this copy of the file;
# the structure below is read from syntax only.
sensor = self.get_sensor(self['/attr/sensor'])
# Resolution at nadir depends on which sensor produced the file.
nadir_resolution = self.get_nadir_resolution(sensor)
for var_name, val in self.file_content.items():
# Only array (SDS) variables become datasets; other entries
# (e.g. scalar attributes) are skipped.
if isinstance(val, SDS):
ds_info = {
'file_type': self.filetype_info['file_type'],
'coordinates': ['longitude', 'latitude'],
'resolution': nadir_resolution,
}
yield DatasetID(name=var_name, resolution=nadir_resolution), ds_info
# NOTE(review): the placement of this bare ``return`` is ambiguous in
# this copy — confirm it sits at function level (ending the generator
# after the full iteration) and not inside the loop, where it would
# stop after the first yielded dataset.
return
optional_datasets = self._get_prereq_datasets(
comp_node.name,
optional_prereqs,
keepables,
skip=True
)
try:
composite = compositor(prereq_datasets,
optional_datasets=optional_datasets,
**self.attrs)
req_ids = [DatasetID.from_dict(req.attrs) for req in prereq_datasets]
opt_ids = [DatasetID.from_dict(req.attrs) for req in optional_datasets]
composite.attrs['prerequisites'] = req_ids
composite.attrs['optional_prerequisites'] = opt_ids
cid = DatasetID.from_dict(composite.attrs)
self.datasets[cid] = composite
# update the node with the computed DatasetID
if comp_node.name in self.wishlist:
self.wishlist.remove(comp_node.name)
self.wishlist.add(cid)
comp_node.name = cid
except IncompatibleAreas:
LOG.warning("Delaying generation of %s "
"because of incompatible areas",
str(compositor.id))
preservable_datasets = set(self.datasets.keys())