# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# NOTE(review): this file is a concatenation of code fragments whose original
# indentation was stripped.  The lines below come from GDAL's ogr2ogr Python
# port and are incomplete (the enclosing function and if/elif chain start
# before this fragment), so they are documented but left byte-identical.
pfnProgress, pProgressArg):
# Abort the whole translation when translating the layer produced from a
# SQL result set fails.
print(
"Terminating translation prematurely after failed\n" +
"translation from sql statement.")
return False
poDS.ReleaseResultSet(poResultSet)
#/* -------------------------------------------------------------------- */
#/* Special case for layer interleaving mode. */
#/* -------------------------------------------------------------------- */
# Turn on interleaved reading for OSM sources, but only when the user has
# not already set OGR_INTERLEAVED_READING explicitly.
elif bSrcIsOSM and gdal.GetConfigOption("OGR_INTERLEAVED_READING", None) is None:
gdal.SetConfigOption("OGR_INTERLEAVED_READING", "YES")
# if (bSplitListFields)
#{
# fprintf( stderr, "FAILURE: -splitlistfields not supported in this mode\n" );
# exit( 1 );
#}
# One AssociatedLayers slot per source layer for the interleaved scan.
nSrcLayerCount = poDS.GetLayerCount()
pasAssocLayers = [AssociatedLayers() for i in range(nSrcLayerCount)]
#/* -------------------------------------------------------------------- */
#/* Special case to improve user experience when translating into */
#/* single file shapefile and source has only one layer, and that */
#/* the layer name isn't specified */
#/* -------------------------------------------------------------------- */
# Fragment of gdaltest's tiff_write_91 test (indentation stripped, enclosing
# function not visible): create a JPEG-in-TIFF at decreasing quality settings
# and check that the file size decreases accordingly.
for quality in [90, 75, 30]:
src_ds = gdal.Open('../gdrivers/data/utm.tif')
ds = gdal.GetDriverByName('GTiff').Create('tmp/tiff_write_91.tif', 1024, 1024, 3, \
options = [ 'COMPRESS=JPEG', 'PHOTOMETRIC=YCBCR', 'JPEG_QUALITY=%d' % quality ])
# Read a 512x512 window upsampled to 1024x1024 and replicate it into all
# three bands.
data = src_ds.GetRasterBand(1).ReadRaster(0, 0, 512, 512, 1024, 1024)
ds.GetRasterBand(1).WriteRaster(0, 0, 1024, 1024, data)
ds.GetRasterBand(2).WriteRaster(0, 0, 1024, 1024, data)
ds.GetRasterBand(3).WriteRaster(0, 0, 1024, 1024, data)
ds = None
# Build internal overviews using the same JPEG quality as the base level,
# then clear the option so it does not leak into later tests.
ds = gdal.Open('tmp/tiff_write_91.tif', gdal.GA_Update)
gdal.SetConfigOption('JPEG_QUALITY_OVERVIEW', '%d' % quality)
ds.BuildOverviews( 'NEAR', overviewlist = [2, 4])
gdal.SetConfigOption('JPEG_QUALITY_OVERVIEW', None)
src_ds = None
ds = None
# Measure the resulting on-disk size via seek-to-end.
f = open('tmp/tiff_write_91.tif', 'rb')
f.seek(0, os.SEEK_END)
size = f.tell()
f.close()
print('quality = %d, size = %d' % (quality, size))
# Sizes are compared against the previous (higher-quality) iteration.
# NOTE(review): last_size is read here but its update is not visible in this
# fragment — presumably the missing original code assigns last_size = size
# at the end of each iteration.
if quality != 90:
if size >= last_size:
gdaltest.post_reason('did not get decreasing file sizes')
print(size)
print(last_size)
# Fragment of gdaltest's tiff_write_93 test (indentation stripped, enclosing
# function not visible): build external JPEG-compressed overviews (.ovr) at
# decreasing quality settings and check the overview file shrinks each time.
last_size = 0
for quality in [90, 75, 30]:
# Best-effort removal of an overview file left over from a previous run;
# NOTE(review): the bare except is kept byte-identical in this doc-only
# pass, though `except OSError:` would be the narrower form.
try:
os.remove('tmp/tiff_write_93.tif.ovr')
except:
pass
# Build external overviews with JPEG/YCbCr compression at this quality,
# then clear the options so they do not leak into later tests.
ds = gdal.Open('tmp/tiff_write_93.tif')
gdal.SetConfigOption('COMPRESS_OVERVIEW', 'JPEG')
gdal.SetConfigOption('JPEG_QUALITY_OVERVIEW', '%d' % quality)
gdal.SetConfigOption('PHOTOMETRIC_OVERVIEW', 'YCBCR')
ds.BuildOverviews( 'NEAR', overviewlist = [2, 4])
gdal.SetConfigOption('COMPRESS_OVERVIEW', None)
gdal.SetConfigOption('JPEG_QUALITY_OVERVIEW', None)
gdal.SetConfigOption('PHOTOMETRIC_OVERVIEW', None)
ds = None
# Measure the size of the external .ovr file via seek-to-end.
f = open('tmp/tiff_write_93.tif.ovr', 'rb')
f.seek(0, os.SEEK_END)
size = f.tell()
f.close()
print('quality = %d, size = %d' % (quality, size))
# Each lower-quality pass must produce a smaller overview file than the
# previous one (first pass at quality 90 is skipped — no baseline yet).
if quality != 90:
if size >= last_size:
gdaltest.post_reason('did not get decreasing file sizes')
print(size)
print(last_size)
return 'fail'
# Fragment of gdaltest's tiff_write_97 test (indentation stripped; `new_ds`,
# `gt` and the surrounding function come from code before this fragment):
# verify geotransform and AREA_OR_POINT handling of a PixelIsPoint file both
# with and without the GTIFF_POINT_GEO_IGNORE option.
md = new_ds.GetMetadataItem( 'AREA_OR_POINT' )
new_ds = None
src_ds = None
# With GTIFF_POINT_GEO_IGNORE in effect the raw (un-shifted) geotransform
# is expected.
gt_expected = (440690.0, 60.0, 0.0, 3751350.0, 0.0, -60.0)
if gt != gt_expected:
print(gt)
gdaltest.post_reason( 'did not get expected geotransform when ignoring PixelIsPoint' )
return 'fail'
# The metadata must still report the file as pixel-is-point.
if md != 'Point':
gdaltest.post_reason( 'did not get expected AREA_OR_POINT value' )
return 'fail'
# read back this file with pixelispoint behavior enabled.
gdal.SetConfigOption( 'GTIFF_POINT_GEO_IGNORE', 'FALSE' )
new_ds = gdal.Open( 'tmp/test_97_2.tif' )
gt = new_ds.GetGeoTransform()
md = new_ds.GetMetadataItem( 'AREA_OR_POINT' )
new_ds = None
# With PixelIsPoint honored, the origin is shifted by half a pixel
# (60 m resolution -> 30 m shift in x and y).
gt_expected = (440660.0, 60.0, 0.0, 3751380.0, 0.0, -60.0)
if gt != gt_expected:
print(gt)
gdaltest.post_reason( 'did not get expected geotransform when ignoring PixelIsPoint (2)' )
return 'fail'
# Fragment of gdaltest's tiff_write_88 test (indentation stripped; `f` is
# opened before this fragment): write a source file truncated by one byte,
# CreateCopy() it (expected to error), and verify the partially written
# output still carries a BigTIFF header.
# NOTE(review): 'len' shadows the built-in len() — kept byte-identical in
# this documentation-only pass.
len = f.tell()
f.seek(0, 0)
# Re-read everything except the last byte to produce a truncated copy.
data = f.read(len-1)
f.close()
f = open('tmp/tiff_write_88_src.tif', 'wb')
f.write(data)
f.close()
src_ds = gdal.Open('tmp/tiff_write_88_src.tif')
# for testing only. We need to keep the file to check it was a bigtiff
gdal.SetConfigOption('GTIFF_DELETE_ON_ERROR', 'NO')
# The copy is expected to fail on the truncated input; silence the errors.
gdal.PushErrorHandler('CPLQuietErrorHandler')
ds = gdaltest.tiff_drv.CreateCopy('tmp/tiff_write_88_dst.tif', src_ds,
options = ['TILED=YES', 'COPY_SRC_OVERVIEWS=YES', 'ENDIANNESS=LITTLE'])
gdal.PopErrorHandler()
gdal.SetConfigOption('GTIFF_DELETE_ON_ERROR', None)
ds = None
src_ds = None
# Read the first 8 bytes of the output header for inspection.
f = open('tmp/tiff_write_88_dst.tif', 'rb')
data = f.read(8)
f.close()
os.remove( 'tmp/tiff_write_88_src.tif' )
os.remove( 'tmp/tiff_write_88_dst.tif' )
import struct
ar = struct.unpack('B' * 8, data)
# The version field at byte offset 2 must be 43 (BigTIFF); classic TIFF
# would hold 42 there.
if ar[2] != 43:
gdaltest.post_reason('not a BIGTIFF file')
print(ar)
return 'fail'
def gdal_configurations(reset=False, max_tmpfile_size=2500):
    """Apply (or roll back) GDAL configuration options for OSM reading.

    When ``reset`` is false, the tuned options are applied: interleaved
    reading, custom indexing, node compression, and an in-memory temporary
    file of up to ``max_tmpfile_size`` MB.  When ``reset`` is true, the
    documented driver defaults are restored instead.
    """
    if reset:
        # Restore the documented driver defaults.
        gdal.SetConfigOption('OGR_INTERLEAVED_READING', 'NO')
        gdal.SetConfigOption('USE_CUSTOM_INDEXING', 'YES')
        gdal.SetConfigOption('COMPRESS_NODES', 'NO')
        gdal.SetConfigOption('MAX_TMPFILE_SIZE', '100')
    else:
        # Whether to enable interleaved reading. Defaults to NO.
        gdal.SetConfigOption('OGR_INTERLEAVED_READING', 'YES')
        # Whether to enable custom indexing. Defaults to YES.
        gdal.SetConfigOption('USE_CUSTOM_INDEXING', 'YES')
        # Whether to compress nodes in temporary DB. Defaults to NO.
        gdal.SetConfigOption('COMPRESS_NODES', 'YES')
        # Maximum size in MB of the in-memory temporary file; beyond this
        # it spills to disk. Defaults to 100.
        gdal.SetConfigOption('MAX_TMPFILE_SIZE', str(max_tmpfile_size))
def gdal_configurations(reset=False, max_tmpfile_size=2500):
    """Set GDAL configuration options for OSM reading, or restore defaults.

    With ``reset`` false, enable interleaved reading, custom indexing and
    node compression, and cap the in-memory temporary file at
    ``max_tmpfile_size`` MB.  With ``reset`` true, put every option back to
    its documented default.
    """
    if not reset:
        options = {
            # Whether to enable interleaved reading. Defaults to NO.
            'OGR_INTERLEAVED_READING': 'YES',
            # Whether to enable custom indexing. Defaults to YES.
            'USE_CUSTOM_INDEXING': 'YES',
            # Whether to compress nodes in temporary DB. Defaults to NO.
            'COMPRESS_NODES': 'YES',
            # Maximum size in MB of in-memory temporary file; beyond this
            # it goes to disk. Defaults to 100.
            'MAX_TMPFILE_SIZE': str(max_tmpfile_size),
        }
    else:
        # Documented driver defaults.
        options = {
            'OGR_INTERLEAVED_READING': 'NO',
            'USE_CUSTOM_INDEXING': 'YES',
            'COMPRESS_NODES': 'NO',
            'MAX_TMPFILE_SIZE': '100',
        }
    for key, value in options.items():
        gdal.SetConfigOption(key, value)
def gdal_configurations(reset=False):
    """Configure GDAL options for OSM reading, or restore the defaults.

    With ``reset`` false, enable interleaved reading and custom indexing
    and allow a 2000 MB in-memory temporary file.  With ``reset`` true,
    restore the documented driver defaults.
    """
    if reset:
        # Documented driver defaults.
        gdal.SetConfigOption('OGR_INTERLEAVED_READING', 'NO')
        gdal.SetConfigOption('USE_CUSTOM_INDEXING', 'YES')
        gdal.SetConfigOption('COMPRESS_NODES', 'NO')
        gdal.SetConfigOption('MAX_TMPFILE_SIZE', '100')
        return
    # Whether to enable interleaved reading. Defaults to NO.
    gdal.SetConfigOption('OGR_INTERLEAVED_READING', 'YES')
    # Whether to enable custom indexing. Defaults to YES.
    gdal.SetConfigOption('USE_CUSTOM_INDEXING', 'YES')
    # Whether to compress nodes in temporary DB. Defaults to NO.
    gdal.SetConfigOption('COMPRESS_NODES', 'NO')
    # Maximum size in MB of the in-memory temporary file; beyond this it
    # goes to disk. Defaults to 100.
    gdal.SetConfigOption('MAX_TMPFILE_SIZE', '2000')
def gdal_configurations(reset=False):
    """Set the GDAL configuration options used for OSM reading.

    With ``reset`` false the tuned values are applied (interleaved reading
    on, 2000 MB in-memory temporary file); with ``reset`` true the
    documented driver defaults are restored.
    """
    settings = [
        # Whether to enable interleaved reading. Defaults to NO.
        ('OGR_INTERLEAVED_READING', 'NO' if reset else 'YES'),
        # Whether to enable custom indexing. Defaults to YES.
        ('USE_CUSTOM_INDEXING', 'YES'),
        # Whether to compress nodes in temporary DB. Defaults to NO.
        ('COMPRESS_NODES', 'NO'),
        # Maximum size in MB of the in-memory temporary file; beyond this
        # it goes to disk. Defaults to 100.
        ('MAX_TMPFILE_SIZE', '100' if reset else '2000'),
    ]
    for name, value in settings:
        gdal.SetConfigOption(name, value)