import os
import zipfile

import numpy as np

# scipy is an optional dependency; record whether it is available
try:
    import scipy
except ImportError:
    IMPORT_SCIPY = False
else:
    IMPORT_SCIPY = True

import pythesint as pti

from nansat.nsr import NSR
from nansat.vrt import VRT
from nansat.domain import Domain
from nansat.node import Node
from nansat.tools import initial_bearing, gdal, ogr
from nansat.tools import WrongMapperError, NansatReadError
class Mapper(VRT):
''' Create VRT with mapping of WKV for Radarsat2 '''
def __init__(self, inputFileName, gdalDataset, gdalMetadata,
xmlonly=False, **kwargs):
''' Create Radarsat2 VRT '''
fPathName, fExt = os.path.splitext(inputFileName)
if zipfile.is_zipfile(inputFileName):
# Open zip file using VSI
fPath, fName = os.path.split(fPathName)
filename = '/vsizip/%s/%s' % (inputFileName, fName)
            if not fName.startswith('RS'):
                raise WrongMapperError('%s: Provided data is not Radarsat-2'
                                       % fName)
gdalDataset = gdal.Open(filename)
gdalMetadata = gdalDataset.GetMetadata()
if self.product[0:4] != "ASA_":
raise WrongMapperError
# get channel string (remove '/', since NetCDF
# does not support that in metadata)
polarization = [{'channel': gdalMetadata['SPH_MDS1_TX_RX_POLAR']
.replace("/", ""), 'bandNum': 1}]
# if there is the 2nd band, get channel string
        if 'SPH_MDS2_TX_RX_POLAR' in gdalMetadata:
            channel = gdalMetadata['SPH_MDS2_TX_RX_POLAR'].replace("/", "")
            if not channel.isspace():
polarization.append({'channel': channel,
'bandNum': 2})
# create empty VRT dataset with geolocation only
VRT.__init__(self, gdalDataset)
# get calibration constant
gotCalibration = True
        try:
            for iPolarization in polarization:
                metaKey = ('MAIN_PROCESSING_PARAMS_ADS_CALIBRATION_FACTORS.%d.EXT_CAL_FACT'
                           % (iPolarization['bandNum']))
                iPolarization['calibrationConst'] = float(
                    gdalDataset.GetMetadataItem(metaKey, 'records'))
        except (TypeError, ValueError):
            try:
                for iPolarization in polarization:
                    # Apparently some ASAR files have calibration
                    # constant stored in another place
                    metaKey = ('MAIN_PROCESSING_PARAMS_ADS_0_CALIBRATION_FACTORS.%d.EXT_CAL_FACT'
                               % (iPolarization['bandNum']))
                    iPolarization['calibrationConst'] = float(
                        gdalDataset.GetMetadataItem(metaKey, 'records'))
            except (TypeError, ValueError):
                self.logger.warning('Cannot get calibrationConst')
                gotCalibration = False
        for ie in import_errors:
            self.logger.error(ie)
# create a Mapper object and get VRT dataset from it
try:
tmp_vrt = nansatMappers[iMapper](self.filename, gdal_dataset, metadata, **kwargs)
self.logger.info('Mapper %s - success!' % iMapper)
self.mapper = iMapper.replace('mapper_', '')
break
except WrongMapperError:
pass
# if no mapper fits, make simple copy of the input DS into a VSI/VRT
if tmp_vrt is None and gdal_dataset is not None:
self.logger.warning('No mapper fits, returning GDAL bands!')
tmp_vrt = VRT.from_gdal_dataset(gdal_dataset, metadata=metadata)
for iBand in range(gdal_dataset.RasterCount):
tmp_vrt.create_band({'SourceFilename': self.filename,
'SourceBand': iBand + 1})
tmp_vrt.dataset.FlushCache()
self.mapper = 'gdal_bands'
# if GDAL cannot open the file, and no mappers exist which can make VRT
if tmp_vrt is None and gdal_dataset is None:
# check if given data file exists
if not os.path.isfile(self.filename):
raise IOError('%s: File does not exist' % (self.filename))
raise NansatReadError('%s: File cannot be read with NANSAT - '
'consider writing a mapper' % self.filename)
return tmp_vrt
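
        # the band just appended is the last raster band of the dataset; fetch it so
        # its sources and metadata can be attached below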
dst_raster_band = self.dataset.GetRasterBand(self.dataset.RasterCount)
# Append sources to destination dataset
if len(srcs) == 1 and srcs[0]['SourceBand'] > 0:
# only one source
dst_raster_band.SetMetadataItem(str('source_0'), str(srcs[0]['XML']),
str('new_vrt_sources'))
elif len(srcs) > 1:
# several sources for PixelFunction
metadataSRC = {}
for i, src in enumerate(srcs):
metadataSRC['source_%d' % i] = src['XML']
dst_raster_band.SetMetadata(metadataSRC, str('vrt_sources'))
# set metadata from WKV
dst_raster_band = VRT._put_metadata(dst_raster_band, wkv)
# set metadata from provided parameters
        # also record the first source's filename and band number in the band metadata
dst['SourceFilename'] = srcs[0]['SourceFilename']
dst['SourceBand'] = str(srcs[0]['SourceBand'])
dst_raster_band = VRT._put_metadata(dst_raster_band, dst)
# return name of the created band
return dst['name']
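
    # Usage sketch (an assumption, not taken from this file): a band that simply
    # mirrors band 1 of an existing file could be added with the same src/dst
    # dictionaries used above, e.g.
    #   vrt.create_band(src={'SourceFilename': 'input.tif', 'SourceBand': 1},
    #                   dst={'name': 'band_copy'})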
# If SpatialRef and extent string are given (but not dataset)
elif srs is not None and ext is not None:
srs = NSR(srs)
# create full dictionary of parameters
extentDic = self._create_extentDic(ext)
# convert -lle to -te
if 'lle' in extentDic.keys():
extentDic = self._convert_extentDic(srs, extentDic)
            # get size/extent from the created extent dictionary
[geoTransform,
rasterXSize, rasterYSize] = self._get_geotransform(extentDic)
# create VRT object with given geo-reference parameters
self.vrt = VRT(srcGeoTransform=geoTransform,
srcProjection=srs.wkt,
srcRasterXSize=rasterXSize,
srcRasterYSize=rasterYSize)
self.extentDic = extentDic
elif lat is not None and lon is not None:
# create self.vrt from given lat/lon
self.vrt = VRT(lat=lat, lon=lon)
else:
raise OptionError('"dataset" or "srsString and extentString" '
'or "dataset and srsString" are required')
self.logger.debug('vrt.dataset: %s' % str(self.vrt.dataset))
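
        # Usage sketch (an assumption, not taken from this file): the two construction
        # paths handled above correspond roughly to
        #   d = Domain(srs='+proj=longlat +datum=WGS84', ext='-te 25 70 35 72 -ts 2000 2000')
        #   d = Domain(lon=lon_array, lat=lat_array)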
lineOffset = (dsOffsetDict['DS_OFFSET'] +
adsParams['offset'] +
dsOffsetDict["DSR_SIZE"] * i)
binaryLine = self.read_binary_line(lineOffset, fmtString, adsWidth)
array = np.append(array, binaryLine)
adsHeight += 1
array = array.reshape(adsHeight, adsWidth)
#array = self.resize_dop_array(adsHeight, adsWidth, array)
# adjust the scale
if '(10)^-6' in adsParams['units']:
array /= 1000000.0
adsParams['units'] = adsParams['units'].replace('(10)^-6 ', '')
# create VRT from the array
adsVrt = VRT(array=array)
# add "name" and "units" to band metadata
bandMetadata = {"name": adsName, "units": adsParams['units']}
adsVrt.dataset.GetRasterBand(1).SetMetadata(bandMetadata)
return adsVrt
Parameters
----------
dst_srs : proj4, WKT, NSR, EPSG
            Destination SRS given as any NSR input parameter
Notes
-----
Reprojects all GCPs to new SRS and updates GCPProjection
"""
# transform coordinates of original GCPs
dst_srs = NSR(dst_srs)
src_srs = NSR(self.dataset.GetGCPProjection())
# make three tuples with X, Y and Z values of GCPs
src_points = list(zip(*[(gcp.GCPX, gcp.GCPY, gcp.GCPZ) for gcp in self.dataset.GetGCPs()]))
dst_points = VRT.transform_coordinates(src_srs, src_points, dst_srs)
# create new GCPs
dst_gcps = []
for p in zip(self.dataset.GetGCPs(), *dst_points):
dst_gcp = gdal.GCP(p[1], p[2], p[3],
p[0].GCPPixel, p[0].GCPLine, p[0].Info, p[0].Id)
dst_gcps.append(dst_gcp)
# Update dataset
self.dataset.SetGCPs(dst_gcps, dst_srs.wkt)
self.dataset.FlushCache()
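
        # After this point the dataset's GCPs and its GCP projection are expressed in
        # dst_srs; any NSR-compatible input (proj4 string, WKT, EPSG code) can be
        # passed as the destination, per the docstring above.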
dst_wkt = src_vrt._set_fake_gcps(dst_srs, dst_gcps, skip_gcps)
if resize_only:
src_vrt._set_geotransform_for_resize()
else:
src_vrt._set_gcps_geolocation_geotransform()
# create Warped VRT GDAL Dataset
warped_dataset = gdal.AutoCreateWarpedVRT(src_vrt.dataset, None, dst_wkt, resample_alg)
# check if Warped VRT was created
if warped_dataset is None:
raise NansatGDALError('Cannot create warpedVRT!')
# create VRT object from Warped VRT GDAL Dataset
warped_vrt = VRT.copy_dataset(warped_dataset)
# set x/y size, geo_transform, block_size
warped_vrt._update_warped_vrt_xml(x_size, y_size, geo_transform, block_size, working_data_type)
        # if self.tps is set, use a thin plate spline (TPS) transformer instead of the
        # default GCP polynomial transformer
if self.tps:
warped_vrt.write_xml(warped_vrt.xml.replace('GCPTransformer', 'TPSTransformer'))
# if given, add dst GCPs
if len(dst_gcps) > 0:
warped_vrt.dataset.SetGCPs(dst_gcps, dst_srs)
warped_vrt._remove_geotransform()
warped_vrt.dataset.SetProjection(str(''))
# Copy self to warpedVRT
warped_vrt.vrt = self.copy()
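        # keeping a copy of the source VRT referenced from warped_vrt prevents the
        # underlying temporary VRT file from being cleaned up while it is still in use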
np.testing.assert_array_equal(channels_data[0]['LATITUDE'],
channels_data[1]['LATITUDE'])
np.testing.assert_array_equal(channels_data[0]['INCANGLE'],
channels_data[1]['INCANGLE'])
np.testing.assert_array_equal(channels_data[0]['HEADING'],
channels_data[1]['HEADING'])
np.testing.assert_array_equal(channels_data[0]['PREDDOPFREQ'],
channels_data[1]['PREDDOPFREQ'])
longitude = channels_data[0]['LONGITUDE']
latitude = channels_data[0]['LATITUDE']
        VRT.__init__(self, lon=longitude, lat=latitude)
        incVRT = VRT(array=channels_data[0]['INCANGLE'], lon=longitude,
                     lat=latitude)
        azVRT = VRT(array=np.mod(channels_data[0]['HEADING'] + 90., 360.),
                    lon=longitude, lat=latitude)
        dcpVRT = VRT(array=channels_data[0]['PREDDOPFREQ'], lon=longitude,
                     lat=latitude)
metaDict = []
self.bandVRTs['incVRT'] = incVRT
self.bandVRTs['azVRT'] = azVRT
self.bandVRTs['dcpVRT'] = dcpVRT
metaDict.append({
'src': {
'SourceFilename': self.bandVRTs['incVRT'].fileName,
'SourceBand': 1
},
'dst': {
'wkv': 'angle_of_incidence',
'name': 'incidence_angle'