conf = {}
for composite_config in composite_configs:
    with open(composite_config) as conf_file:
        conf = recursive_dict_update(conf, yaml.load(conf_file, Loader=UnsafeLoader))
try:
    sensor_name = conf['sensor_name']
except KeyError:
    LOG.debug('No "sensor_name" tag found in %s, skipping.',
              composite_config)
    return

sensor_id = sensor_name.split('/')[-1]
sensor_deps = sensor_name.split('/')[:-1]

compositors = self.compositors.setdefault(sensor_id, DatasetDict())
modifiers = self.modifiers.setdefault(sensor_id, {})

for sensor_dep in reversed(sensor_deps):
    if sensor_dep not in self.compositors or sensor_dep not in self.modifiers:
        self.load_sensor_composites(sensor_dep)
if sensor_deps:
    compositors.update(self.compositors[sensor_deps[-1]])
    modifiers.update(self.modifiers[sensor_deps[-1]])

for composite_type in ['modifiers', 'composites']:
    if composite_type not in conf:
        continue
    for composite_name in conf[composite_type]:
        self._process_composite_config(composite_name, conf,
                                       composite_type, sensor_id,
                                       composite_config, **kwargs)
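# For context, a minimal sketch of what one parsed composite configuration
# (the ``conf`` dict above) might contain. The sensor name and the composite /
# modifier entries are hypothetical examples, not taken from the snippet:
# ``"visir/seviri"`` gives sensor_id "seviri" and sensor_deps ["visir"], so the
# generic "visir" entries are loaded first and then extended by this config.
example_conf = {
    'sensor_name': 'visir/seviri',
    'composites': {
        'overview': {'prerequisites': [0.6, 0.8, 10.8]},
    },
    'modifiers': {
        'sunz_corrected': {'optional_prerequisites': ['solar_zenith_angle']},
    },
}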
# (Start of this block is not shown in the snippet: ``fp`` is a dict that
# collects keyword-style filters such as ``area`` before they are merged
# into ``reader_kwargs`` below.)
fp.update({
    'area': area,
})
filter_parameters = fp

if filter_parameters:
    if reader_kwargs is None:
        reader_kwargs = {}
    reader_kwargs.setdefault('filter_parameters', {}).update(filter_parameters)

if filenames and isinstance(filenames, str):
    raise ValueError("'filenames' must be a list of files: Scene(filenames=[filename])")
self.readers = self.create_reader_instances(filenames=filenames,
                                            reader=reader,
                                            reader_kwargs=reader_kwargs)
self.attrs.update(self._compute_metadata_from_readers())
self.datasets = DatasetDict()
self.cpl = CompositorLoader(self.ppp_config_dir)
comps, mods = self.cpl.load_compositors(self.attrs['sensor'])
self.wishlist = set()
self.dep_tree = DependencyTree(self.readers, comps, mods)
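# A small usage sketch of the constructor path above. The file name, reader
# name and time window are hypothetical; the point is that ``filenames`` must
# be a list (not a string) and that filter parameters travel inside
# ``reader_kwargs``.
from datetime import datetime
from satpy import Scene

scn = Scene(filenames=['/data/seviri/segment_0001.hrit'],
            reader='seviri_l1b_hrit',
            reader_kwargs={'filter_parameters': {
                'start_time': datetime(2018, 2, 28, 15, 0)}})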
reader_datasets = {}
for node in dataset_nodes:
    ds_id = node.name
    # if we already have this node loaded or the node was assigned
    # by the user (node data is None) then don't try to load from a
    # reader
    if ds_id in self.datasets or not isinstance(node.data, dict):
        continue
    reader_name = node.data.get('reader_name')
    if reader_name is None:
        # This shouldn't be possible
        raise RuntimeError("Dependency tree has a corrupt node.")
    reader_datasets.setdefault(reader_name, set()).add(ds_id)

# load all datasets for one reader at a time
loaded_datasets = DatasetDict()
for reader_name, ds_ids in reader_datasets.items():
    reader_instance = self.readers[reader_name]
    new_datasets = reader_instance.load(ds_ids, **kwargs)
    loaded_datasets.update(new_datasets)
self.datasets.update(loaded_datasets)
return loaded_datasets
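# Minimal illustration of the grouping step above, using plain strings instead
# of real DatasetID objects and hypothetical reader names: dataset IDs are
# bucketed per reader so that each reader instance is asked to load its
# datasets exactly once.
reader_datasets = {}
for reader_name, ds_id in [('reader_a', 'IR_108'),
                           ('reader_a', 'VIS006'),
                           ('reader_b', 'cloud_mask')]:
    reader_datasets.setdefault(reader_name, set()).add(ds_id)
# reader_datasets == {'reader_a': {'IR_108', 'VIS006'}, 'reader_b': {'cloud_mask'}}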
image_files = []
prologue_file = None
epilogue_file = None
for filename in self.info['filenames']:
    try:
        file_info = parse(pattern, os.path.basename(filename))
    except ValueError:
        continue
    if file_info["segment"] == "EPI":
        epilogue_file = filename
    elif file_info["segment"] == "PRO":
        prologue_file = filename
    else:
        image_files.append(filename)

start_times = set()
datasets = DatasetDict()
area_converted_to_extent = False
area_extent = None
for ds in dataset_keys:
    channel_files = []
    for filename in image_files:
        file_info = parse(pattern, os.path.basename(filename))
        if file_info["dataset_name"] == ds.name:
            channel_files.append(filename)
            start_times.add(file_info['start_time'])
    if not channel_files:
        continue

    kwargs = {}
    if 'platform_name' in self.info:
        kwargs['platform_name'] = self.info['platform_name']
def load(self, dataset_keys, previous_datasets=None):
    """Load `dataset_keys`.

    If `previous_datasets` is provided, do not reload those.
    """
    all_datasets = previous_datasets or DatasetDict()
    datasets = DatasetDict()

    # Include coordinates in the list of datasets to load
    dsids = [self.get_dataset_key(ds_key) for ds_key in dataset_keys]
    coordinates = self._get_coordinates_for_dataset_keys(dsids)
    all_dsids = list(set().union(*coordinates.values())) + dsids
    for dsid in all_dsids:
        if dsid in all_datasets:
            continue
        coords = [all_datasets.get(cid, None)
                  for cid in coordinates.get(dsid, [])]
        ds = self._load_dataset_with_area(dsid, coords)
        if ds is not None:
            all_datasets[dsid] = ds
            if dsid in dsids:
                datasets[dsid] = ds
    # return only the explicitly requested datasets
    return datasets
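# Minimal illustration of how ``all_dsids`` is ordered above, with plain
# strings standing in for DatasetID objects: coordinate IDs come first, so the
# loop loads (and caches) coordinates before the datasets that reference them.
coordinates = {'IR_108': {'longitude', 'latitude'},
               'VIS006': {'longitude', 'latitude'}}
dsids = ['IR_108', 'VIS006']
all_dsids = list(set().union(*coordinates.values())) + dsids
# e.g. ['latitude', 'longitude', 'IR_108', 'VIS006'] (set order may vary)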
def load_compositors(self, sensor_names):
    """Load compositors and modifiers for the given sensor names.

    Returns a ``(comps, mods)`` tuple. `comps` is a dictionary:

        sensor_name -> composite ID -> compositor object

    And `mods` is a dictionary:

        sensor_name -> modifier name -> (modifier class,
        modifiers options)

    Note that these dictionaries are copies of those cached in
    this object.
    """
    comps = {}
    mods = {}
    for sensor_name in sensor_names:
        if sensor_name not in self.compositors:
            self.load_sensor_composites(sensor_name)
        if sensor_name in self.compositors:
            comps[sensor_name] = DatasetDict(
                self.compositors[sensor_name].copy())
            mods[sensor_name] = self.modifiers[sensor_name].copy()
    return comps, mods
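# A small usage sketch of the method above; ``config_dir`` and the sensor,
# composite, and modifier names are hypothetical examples. ``comps`` is keyed
# by sensor name and then by composite ID, ``mods`` by sensor name and then
# modifier name.
cpl = CompositorLoader(config_dir)
comps, mods = cpl.load_compositors(['seviri'])
overview_compositor = comps['seviri']['overview']
modifier_class, modifier_options = mods['seviri']['sunz_corrected']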