# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):
try:
satellite = gdalDataset.GetDescription().split(",")[2]
except (AttributeError, IndexError):
raise WrongMapperError
satDict = Mapper.calibration()
for sat in satDict:
if sat['name'] == satellite:
print('This is ' + satellite)
wavelengths = sat['wavelengths']
try:
scale = sat['scale']
offset = sat['offset']
except:
print("No scale and offset found")
scale = None
offset = None
try:
LUT = sat['LUT']
def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):
''' OBPG L3 VRT '''
try:
assert 'Level-3 Standard Mapped Image' in gdalMetadata['Title']
except:
raise WrongMapperError
# get list of similar (same date) files in the directory
iDir, iFile = os.path.split(filename)
iFileName, iFileExt = os.path.splitext(iFile)
simFilesMask = os.path.join(iDir, iFileName)
simFiles = glob.glob(simFilesMask + iFileExt[0:6] + '*')
#print 'simFilesMask, simFiles', simFilesMask, simFiles
metaDict = []
for simFile in simFiles:
#print 'simFile', simFile
# open file, get metadata and get parameter name
simSupDataset = gdal.Open(simFile)
if simSupDataset is None:
# skip this similar file
#print 'No dataset: %s not a supported SMI file' % simFile
# --- Sentinel-1 mapper constructor body fragment (enclosing `def` is not
# visible in this chunk; names like manifest_files, fast, polarizations,
# mds_files, calibration_files come from the missing part) ---
# read SAFE manifest; assumes manifest_files is non-empty -- TODO confirm
manifest_data = self.read_manifest_data(manifest_files[0])
# very fast constructor without any bands only with some metadata and geolocation
self._init_empty(manifest_data, self.annotation_data)
# skip adding bands in the fast mode and RETURN
if fast:
    return
# Open data files with GDAL
gdalDatasets = {}
for pol in polarizations:
    gdalDatasets[pol] = gdal.Open(mds_files[pol])
    if not gdalDatasets[pol]:
        raise WrongMapperError('%s: No Sentinel-1 datasets found' % mds_files[pol])
# Check metadata to confirm it is Sentinel-1 L1
metadata = gdalDatasets[polarizations[0]].GetMetadata()
# create full size VRTs with incidenceAngle and elevationAngle
annotation_vrts = self.vrts_from_arrays(self.annotation_data,
                                        ['incidenceAngle', 'elevationAngle'])
self.band_vrts.update(annotation_vrts)
# create full size VRTS with calibration LUT
calibration_names = ['sigmaNought', 'betaNought']
calibration_list_tag = 'calibrationVectorList'
for calibration_file in calibration_files:
    # polarization suffix (e.g. '_VV') taken from field 4 of the
    # '-'-separated file basename -- presumably the SAFE naming scheme;
    # verify against actual product file names
    pol = '_' + os.path.basename(calibration_file).split('-')[4].upper()
    xml = self.read_vsi(calibration_file)
    calibration_data = self.read_calibration(xml, calibration_list_tag, calibration_names, pol)
''' Create VRT

Parameters
----------
GCP_COUNT : int
    number of GCPs along each dimension
'''
# (enclosing `def` is not visible in this chunk; `filename` and
# `self.titles` come from the missing part)
# extension must be .nc
if os.path.splitext(filename)[1] != '.nc':
    raise WrongMapperError
# file must contain navigation_data/longitude
try:
    ds = gdal.Open('HDF5:"%s"://navigation_data/longitude' % filename)
except RuntimeError:
    raise WrongMapperError
else:
    dsMetadata = ds.GetMetadata()
# title value must be known (self.titles presumably lists supported products)
if dsMetadata.get('title', '') not in self.titles:
    raise WrongMapperError
# get geophysical data variables
subDatasets = gdal.Open(filename).GetSubDatasets()
metaDict = []
for subDataset in subDatasets:
    # subdataset name is a '/'-separated path: .../<group>/<variable>
    groupName = subDataset[0].split('/')[-2]
    if groupName not in ['geophysical_data', 'navigation_data']:
        continue
    varName = subDataset[0].split('/')[-1]
    subds = gdal.Open(subDataset[0])
    # NOTE(review): fragment truncated here in SOURCE
self.logger.debug('Trying %s...' % iMapper)
# show all ImportError warnings before trying generic_mapper
if iMapper == 'mapper_generic' and len(import_errors) > 0:
self.logger.error('\nWarning! The following mappers failed:')
for ie in import_errors:
self.logger.error(import_errors)
# create a Mapper object and get VRT dataset from it
try:
tmp_vrt = nansatMappers[iMapper](self.filename, gdal_dataset, metadata, **kwargs)
self.logger.info('Mapper %s - success!' % iMapper)
self.mapper = iMapper.replace('mapper_', '')
break
except WrongMapperError:
pass
# if no mapper fits, make simple copy of the input DS into a VSI/VRT
if tmp_vrt is None and gdal_dataset is not None:
self.logger.warning('No mapper fits, returning GDAL bands!')
tmp_vrt = VRT.from_gdal_dataset(gdal_dataset, metadata=metadata)
for iBand in range(gdal_dataset.RasterCount):
tmp_vrt.create_band({'SourceFilename': self.filename,
'SourceBand': iBand + 1})
tmp_vrt.dataset.FlushCache()
self.mapper = 'gdal_bands'
# if GDAL cannot open the file, and no mappers exist which can make VRT
if tmp_vrt is None and gdal_dataset is None:
# check if given data file exists
if not os.path.isfile(self.filename):
'''
Parameters
-----------
filename : string
gdalDataset : gdal dataset
gdalMetadata : gdal metadata
'''
# (enclosing `def` is not visible in this chunk; ASAR/Envisat mapper body)
self.setup_ads_parameters(filename, gdalMetadata)
# only Envisat ASAR products (product id starts with "ASA_") are accepted
if self.product[0:4] != "ASA_":
    raise WrongMapperError
if not IMPORT_SCIPY:
    raise NansatReadError('ASAR data cannot be read because scipy is not installed')
# get channel string (remove '/', since NetCDF
# does not support that in metadata)
polarization = [{'channel': gdalMetadata['SPH_MDS1_TX_RX_POLAR']
                 .replace("/", ""), 'bandNum': 1}]
# if there is the 2nd band, get channel string
if 'SPH_MDS2_TX_RX_POLAR' in gdalMetadata.keys():
    channel = gdalMetadata['SPH_MDS2_TX_RX_POLAR'].replace("/", "")
    # a blank-only channel string means the 2nd band is absent
    if not(channel.isspace()):
        polarization.append({'channel': channel,
                             'bandNum': 2})
# create empty VRT dataset with geolocation only
# NOTE(review): fragment truncated here in SOURCE
def __init__(self, filename, gdal_dataset, gdal_metadata, *args, **kwargs):
    # Generic CF-netCDF mapper: rejects anything that is not a .nc file,
    # has no metadata, or is a Nansat-generated swath netcdf (GCP keys).
    if not filename.endswith('nc'):
        raise WrongMapperError
    self.input_filename = filename
    if not gdal_metadata:
        raise WrongMapperError
    # NOTE(review): key name 'GDAL_NANSAT_GCPY_000' — confirm spelling
    # against whatever writes these keys (GCPX/GCPY naming is not visible here)
    if 'NC_GLOBAL#GDAL_NANSAT_GCPY_000' in list(gdal_metadata.keys()) or \
            'NC_GLOBAL#GDAL_NANSAT_GCPProjection' in list(gdal_metadata.keys()):
        # Probably Nansat generated netcdf of swath data - see issue #192
        raise WrongMapperError
    # strip GDAL/Nansat prefixes from metadata keys
    metadata = VRT._remove_strings_in_metadata_keys(gdal_metadata,
                                                    ['NC_GLOBAL#', 'NANSAT_', 'GDAL_'])
    # Set origin metadata (TODO: agree on keyword...)
    origin = ''
    nans = 'NANSAT'
    if 'origin' in list(metadata.keys()):
        origin = metadata['origin'] + ' '
    # if any key still mentions NANSAT, append 'NANSAT' to the origin
    for key in list(metadata.keys()):
        if nans in key:
            metadata['origin'] = origin + nans
    # else: Nothing needs to be done, origin stays the same...
    # Check conventions metadata
    if 'Conventions' not in list(metadata.keys()) or 'CF' not in metadata['Conventions']:
        # NOTE(review): fragment truncated here in SOURCE — the body of this
        # check (presumably `raise WrongMapperError`) is not visible.
def __init__(self, filename, gdalDataset, gdalMetadata,
             outFolder=downloads, **kwargs):
    """Create NCEP VRT"""
    # NOTE(review): exists()+mkdir is racy (TOCTOU) and mkdir fails if
    # parents are missing — consider makedirs, but the version check below
    # suggests py2 support, so exist_ok= is not available here.
    if not os.path.exists(outFolder):
        os.mkdir(outFolder)
    ##############
    # Get time
    ##############
    # filename is not a path here but a keyword string:
    # 'ncep_wind_online_<YYYYmmddHHMM>'
    keyword_base = 'ncep_wind_online'
    if filename[0:len(keyword_base)] != keyword_base:
        raise WrongMapperError
    time_str = filename[len(keyword_base)+1::]
    time = datetime.strptime(time_str, '%Y%m%d%H%M')
    print(time)
    ########################################
    # Find and download online grib file
    ########################################
    # Find closest 6 hourly modelrun and forecast hour
    model_run_hour = round((time.hour + time.minute/60.)/6)*6
    nearest_model_run = (datetime(time.year, time.month, time.day)
                         + timedelta(hours=model_run_hour))
    # manual total_seconds computation for pre-2.7 interpreters
    if sys.version_info < (2, 7):
        td = (time - nearest_model_run)
        forecast_hour = (td.microseconds +
                         (td.seconds + td.days * 24 * 3600)
                         # NOTE(review): fragment truncated mid-expression in SOURCE
def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):
    # Mapper for a wind product on one fixed grid: accepts only datasets
    # whose geotransform matches the hard-coded grid below.
    try:
        # GDAL geotransform has 6 elements; only the first 5 are compared
        geo_transform = gdalDataset.GetGeoTransform()[0:5]
    except AttributeError:
        # gdalDataset is None (file not GDAL-readable)
        raise WrongMapperError
    # expected grid: origin lon -12.1, 0.2 deg pixels, origin lat 81.95
    if geo_transform != (-12.1, 0.2, 0.0, 81.95, 0.0):
        raise WrongMapperError
    # band 2 = eastward wind, band 3 = northward wind, both at 10 m height
    metaDict = [{'src': {'SourceFilename': filename,
                         'SourceBand': 2,
                         'NODATA': 9999},
                 'dst': {'wkv': 'eastward_wind',
                         'height': '10 m'}
                 },
                {'src': {'SourceFilename': filename,
                         'SourceBand': 3,
                         'NODATA': 9999},
                 'dst': {'wkv': 'northward_wind',
                         'height': '10 m'}
                 },
                {'src': [{'SourceFilename': filename,
                          'SourceBand': 2,
                          'DataType': gdalDataset.GetRasterBand(2).DataType
                          # NOTE(review): fragment truncated mid-dict in SOURCE
def __init__(self, *args, **kwargs):
    # Mapper that recognizes the platform/instrument from GCMD keywords
    # found in the 'source' metadata attribute.
    # args: (filename, gdal_dataset, gdal_metadata) -- positional pass-through
    filename = args[0]
    gdal_metadata = VRT._remove_strings_in_metadata_keys(args[2],
                                                         ['NC_GLOBAL#', 'NANSAT_', 'GDAL_'])
    gcmd_keywords_mapping = get_gcmd_keywords_mapping()
    # last matching key wins if several keys occur in 'source'
    for key, val in list(gcmd_keywords_mapping.items()):
        if 'source' in list(gdal_metadata.keys()) and key in gdal_metadata['source']:
            instrument = gcmd_keywords_mapping[key]['instrument']
            platform = gcmd_keywords_mapping[key]['platform']
    # `locals()` probe: `instrument` only exists if some key matched above
    if not 'instrument' in locals():
        raise WrongMapperError
    super(Mapper, self).__init__(*args, **kwargs)
    # stamp ISO-8601 time coverage and recognized platform/instrument
    time_coverage_start, time_coverage_end = self.time_coverage()
    self.dataset.SetMetadataItem('time_coverage_start',
                                 (time_coverage_start.isoformat()))
    self.dataset.SetMetadataItem('time_coverage_end',
                                 (time_coverage_end.isoformat()))
    self.dataset.SetMetadataItem('instrument', instrument)
    self.dataset.SetMetadataItem('platform', platform)
    # NOTE(review): this may be the end of the method, but SOURCE ends here —
    # confirm nothing follows in the full file.