metaDict = []
bandDatasetMask = 'HDF4_EOS:EOS_SWATH:"%s":%s:ImageData'
for bandName, bandWave in zip(bandNames, bandWaves):
metaEntry = {'src': {'SourceFilename': (bandDatasetMask
% (filename, bandName)),
'SourceBand': 1,
'DataType': 6,
},
'dst': {'wkv': 'toa_outgoing_spectral_radiance',
'wavelength': str(bandWave),
'suffix': str(bandWave),
}}
metaDict.append(metaEntry)
# create empty VRT dataset with geolocation only
gdalSubDataset = gdal.Open(metaDict[0]['src']['SourceFilename'])
self._init_from_gdal_dataset(gdalSubDataset, metadata=gdalSubDataset.GetMetadata())
# add bands with metadata and corresponding values to the empty VRT
self.create_bands(metaDict)
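# Illustration only (hypothetical file and band names, not part of the mapper):
# the shape of a single metaDict entry consumed by create_bands(). 'src' tells
# GDAL which subdataset/band to read pixels from (DataType 6 == gdal.GDT_Float32),
# while 'dst' carries the metadata attached to the resulting VRT band.
from osgeo import gdal
example_entry = {
    'src': {'SourceFilename': 'HDF4_EOS:EOS_SWATH:"some_granule.hdf":Band_412:ImageData',
            'SourceBand': 1,
            'DataType': gdal.GDT_Float32},
    'dst': {'wkv': 'toa_outgoing_spectral_radiance',
            'wavelength': '412',
            'suffix': '412'}}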
# find largest lon/lat subdatasets
latShape0 = 0
for subDataset in subDatasets:
if 'Latitude' in subDataset[1]:
ls = int(subDataset[1].strip().split('[')[1].split('x')[0])
if ls >= latShape0:
latShape0 = ls
latSubDS = subDataset[0]
if 'Longitude' in subDataset[1]:
ls = int(subDataset[1].strip().split('[')[1].split('x')[0])
if ls >= latShape0:
-----------
fileName : string
gdalDataset : gdal dataset
gdalMetadata : gdal metadata
latlonGrid : numpy 2 layered 2D array with lat/lons of desired grid
'''
# test if the input file is an ASCAT product
iDir, iFile = os.path.split(fileName)
iFileName, iFileExt = os.path.splitext(iFile)
try:
assert iFileName[0:6] == 'ascat_' and iFileExt == '.nc'
except AssertionError:
raise WrongMapperError
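# A minimal sketch of the rejection pattern above, not the actual mapper code.
# The import path is an assumption (nansat.exceptions in recent Nansat versions,
# nansat.tools in older ones) and the helper name is hypothetical. Raising
# WrongMapperError early lets Nansat move on and try the next mapper in its chain.
import os
from nansat.exceptions import WrongMapperError

def check_ascat_filename(filename):
    """Raise WrongMapperError unless the file looks like an ASCAT NetCDF product."""
    basename, ext = os.path.splitext(os.path.split(filename)[1])
    if not (basename.startswith('ascat_') and ext == '.nc'):
        raise WrongMapperError('%s is not an ASCAT file' % filename)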
# Create geolocation
subDataset = gdal.Open('NETCDF:"' + fileName + '":lat')
self.GeolocVRT = VRT(srcRasterXSize=subDataset.RasterXSize,
srcRasterYSize=subDataset.RasterYSize)
GeolocMetaDict = [{'src': {'SourceFilename': ('NETCDF:"' + fileName +
'":lon'),
'SourceBand': 1,
'ScaleRatio': 0.00001,
'ScaleOffset': -360},
'dst': {}},
{'src': {'SourceFilename': ('NETCDF:"' + fileName +
'":lat'),
'SourceBand': 1,
'ScaleRatio': 0.00001,
'ScaleOffset': 0},
'dst': {}}]
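# Illustration of the scaling used in GeolocMetaDict above (assumed GDAL VRT
# ComplexSource semantics: value = stored * ScaleRatio + ScaleOffset). The
# packed integer value below is hypothetical.
def unpack(stored, scale_ratio, scale_offset):
    """Convert a packed integer to geophysical units as the VRT source would."""
    return stored * scale_ratio + scale_offset

# e.g. a hypothetical packed longitude of 19000000:
lon_degrees = unpack(19000000, 0.00001, -360)   # approximately -170.0 degrees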
simFilesMask = os.path.join(iDir, iFileName[0:30] + '*.nc')
simFiles = glob.glob(simFilesMask)
print('simFilesMask, simFiles', simFilesMask, simFiles)
metaDict = []
for simFile in simFiles:
print('simFile', simFile)
# open file, get metadata and get parameter name
simSupDataset = gdal.Open(simFile)
simSubDatasets = simSupDataset.GetSubDatasets()
simWKV = None
for simSubDataset in simSubDatasets:
if '_mean' in simSubDataset[0]:
simValidSupDataset = simSupDataset
simGdalDataset = gdal.Open(simSubDataset[0])
simBand = simGdalDataset.GetRasterBand(1)
simBandMetadata = simBand.GetMetadata()
simVarname = simBandMetadata['NETCDF_VARNAME']
# get WKV
print(' simVarname', simVarname)
if simVarname in self.varname2wkv:
simWKV = self.varname2wkv[simVarname]
break
# skip adding this file if it is not valid (no known WKV found)
if simWKV is None:
continue
metaEntry = {
'src': {'SourceFilename': simSubDataset[0],
'SourceBand': 1},
else:
antennaPointing = -90
rs2_3 = rs2_1.node('orbitAndAttitude').node('orbitInformation')
passDirection = rs2_3['passDirection']
# create empty VRT dataset with geolocation only
VRT.__init__(self, gdalDataset)
# define dictionary of metadata and band-specific parameters
pol = []
metaDict = []
# Get the subdataset with calibrated sigma0 only
for dataset in gdalDataset.GetSubDatasets():
if dataset[1] == 'Sigma Nought calibrated':
s0dataset = gdal.Open(dataset[0])
s0datasetName = dataset[0][:]
band = s0dataset.GetRasterBand(1)
s0datasetPol = band.GetMetadata()['POLARIMETRIC_INTERP']
for i in range(1, s0dataset.RasterCount+1):
iBand = s0dataset.GetRasterBand(i)
polString = iBand.GetMetadata()['POLARIMETRIC_INTERP']
suffix = polString
# The nansat data will be complex
# if the SAR data is of GDAL type 10 (GDT_CFloat32)
dtype = iBand.DataType
if dtype == 10:
# add intensity band
metaDict.append(
{'src': {'SourceFilename':
('RADARSAT_2_CALIB:SIGMA0:'
+ fileName + '/product.xml'),
# Get file names from dataset or subdataset
subDatasets = gdalDataset.GetSubDatasets()
if len(subDatasets) == 0:
fileNames = [fileName]
else:
fileNames = [f[0] for f in subDatasets]
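# For reference (illustrative file name only): GetSubDatasets() returns a list
# of (name, description) tuples, which is why f[0] above is the GDAL-openable
# subdataset name and f[1] its human-readable description.
from osgeo import gdal
ds = gdal.Open('some_multivariable_file.nc')  # hypothetical file
if ds is not None:
    for name, description in ds.GetSubDatasets():
        print(name, '->', description)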
# add bands with metadata and corresponding values to the empty VRT
metaDict = []
geoFileDict = {}
xDatasetSource = ''
yDatasetSource = ''
firstXSize = 0
firstYSize = 0
for i, fileName in enumerate(fileNames):
subDataset = gdal.Open(fileName)
# choose the first dataset with a grid
if (firstXSize == 0 and firstYSize == 0 and
subDataset.RasterXSize > 1 and subDataset.RasterYSize > 1):
firstXSize = subDataset.RasterXSize
firstYSize = subDataset.RasterYSize
firstSubDataset = subDataset
# get projection from the first subDataset
projection = firstSubDataset.GetProjection()
# take bands whose sizes are the same as the first band's
if (subDataset.RasterXSize == firstXSize and
subDataset.RasterYSize == firstYSize):
if projection == '':
projection = subDataset.GetProjection()
if ('GEOLOCATION_X_DATASET' in fileName or
'longitude' in fileName):
self.correct_geolocation_data()
# read manifest file
manifest_data = self.read_manifest_data(manifest_files[0])
# very fast constructor: no bands, only some metadata and geolocation
self._init_empty(manifest_data, self.annotation_data)
# skip adding bands in the fast mode and RETURN
if fast:
return
# Open data files with GDAL
gdalDatasets = {}
for pol in polarizations:
gdalDatasets[pol] = gdal.Open(mds_files[pol])
if not gdalDatasets[pol]:
raise WrongMapperError('%s: No Sentinel-1 datasets found' % mds_files[pol])
# Check metadata to confirm it is Sentinel-1 L1
metadata = gdalDatasets[polarizations[0]].GetMetadata()
# create full size VRTs with incidenceAngle and elevationAngle
annotation_vrts = self.vrts_from_arrays(self.annotation_data,
['incidenceAngle', 'elevationAngle'])
self.band_vrts.update(annotation_vrts)
# create full size VRTS with calibration LUT
calibration_names = ['sigmaNought', 'betaNought']
calibration_list_tag = 'calibrationVectorList'
for calibration_file in calibration_files:
Number_lines = int(gdalMetadata['NC_GLOBAL#Number_lines'])
Number_columns = int(gdalMetadata['NC_GLOBAL#Number_columns'])
Element_x_size = float(gdalMetadata['NC_GLOBAL#Element_x_size'])
Element_y_size = float(gdalMetadata['NC_GLOBAL#Element_y_size'])
except (KeyError, ValueError, TypeError):
raise WrongMapperError
# find subdataset with DEPTH
subDatasets = gdalDataset.GetSubDatasets()
dSourceFile = None
for subDataset in subDatasets:
if subDataset[0].endswith('.mnt":DEPTH'):
dSourceFile = subDataset[0]
if dSourceFile is None:
raise WrongMapperError
dSubDataset = gdal.Open(dSourceFile)
dMetadata = dSubDataset.GetMetadata()
try:
scale_factor = dMetadata['DEPTH#scale_factor']
add_offset = dMetadata['DEPTH#add_offset']
except KeyError:
raise WrongMapperError
geoTransform = [mbWestLongitude, Element_x_size, 0,
mbNorthLatitude, 0, -Element_y_size]
# create empty VRT dataset with geolocation only
VRT.__init__(self, srcGeoTransform=geoTransform,
srcMetadata=gdalMetadata,
srcProjection=NSR(mbProj4String).wkt,
srcRasterXSize=Number_columns,
srcRasterYSize=Number_lines)
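# For reference (standard GDAL geotransform convention, independent of this
# mapper): geoTransform = [originX, pixelWidth, rowRotation,
#                          originY, columnRotation, pixelHeight],
# with a negative pixelHeight for north-up grids, as used when building
# geoTransform above.
def pixel_to_geo(gt, col, row):
    """Map a (col, row) pixel index to georeferenced x/y coordinates."""
    return (gt[0] + col * gt[1] + row * gt[2],
            gt[3] + col * gt[4] + row * gt[5])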
test_openable(self.fileName)
else:
ff = glob.glob(os.path.join(self.fileName, '*.*'))
for f in ff:
test_openable(f)
# lazy import of nansat mappers
# if nansat mappers were not imported yet
global nansatMappers
if nansatMappers is None:
nansatMappers = _import_mappers()
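# A rough sketch of the lazy-import idea used above (not the actual
# _import_mappers implementation; the cache name and mapper list are
# assumptions): import mapper modules only on first use and cache them.
import importlib

_mappers_cache = None  # hypothetical module-level cache

def get_mappers(names=('mapper_radarsat2', 'mapper_asar')):
    global _mappers_cache
    if _mappers_cache is None:
        _mappers_cache = {}
        for name in names:
            try:
                _mappers_cache[name] = importlib.import_module('nansat.mappers.' + name)
            except ImportError:
                # skip mappers whose optional dependencies are not installed
                pass
    return _mappers_cache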
# open GDAL dataset. It will be parsed to all mappers for testing
gdalDataset = None
if self.fileName[:4] != 'http':
try:
gdalDataset = gdal.Open(self.fileName)
except RuntimeError:
self.logger.error('GDAL could not open ' + self.fileName +
', trying to read with Nansat mappers...')
if gdalDataset is not None:
# get metadata from the GDAL dataset
metadata = gdalDataset.GetMetadata()
else:
metadata = None
tmpVRT = None
importErrors = []
if mapperName != '':
# If a specific mapper is requested, we test only this one.
# get the module name
mapperName = 'mapper_' + mapperName.replace('mapper_', '')
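# Illustration of the normalisation above (example mapper name only): both the
# short and the prefixed spelling resolve to the same module name.
for requested in ('radarsat2', 'mapper_radarsat2'):
    assert 'mapper_' + requested.replace('mapper_', '') == 'mapper_radarsat2'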
# Get file names from dataset or subdataset
subDatasets = gdalDataset.GetSubDatasets()
if len(subDatasets) == 0:
filenames = [inputFileName]
else:
filenames = [f[0] for f in subDatasets]
# add bands with metadata and corresponding values to the empty VRT
metaDict = []
xDatasetSource = ''
yDatasetSource = ''
firstXSize = 0
firstYSize = 0
for _, filename in enumerate(filenames):
subDataset = gdal.Open(filename)
# choose the first dataset with a grid
if (firstXSize == 0 and firstYSize == 0 and
subDataset.RasterXSize > 1 and subDataset.RasterYSize > 1):
firstXSize = subDataset.RasterXSize
firstYSize = subDataset.RasterYSize
firstSubDataset = subDataset
# get projection from the first subDataset
projection = firstSubDataset.GetProjection()
# take bands whose sizes are the same as the first band's
if (subDataset.RasterXSize == firstXSize and
subDataset.RasterYSize == firstYSize):
if projection == '':
projection = subDataset.GetProjection()
if ('GEOLOCATION_X_DATASET' in filename or
'longitude' in filename):