def test_write_invalid_numeric_layer(path_coutwildrnp_shp, tmpdir):
    with pytest.raises(ValueError):
        fiona.open(str(tmpdir.join("test-no-iter.shp")), mode='w', layer=0)
"latitude": 45.66894,
"longitude": 87.91166
},
"tricky": "{gotcha"
}
}
]
}
"""
tmpdir = tempfile.mkdtemp()
filename = os.path.join(tmpdir, 'test.json')
with open(filename, 'w') as f:
f.write(data)
with fiona.open(filename) as src:
ftr = next(iter(src))
props = ftr['properties']
assert props['upperLeftCoordinate']['latitude'] == 45.66894
assert props['upperLeftCoordinate']['longitude'] == 87.91166
assert props['tricky'] == "{gotcha"
def test_field_truncation_issue177(tmpdir):
    name = str(tmpdir.join('output.shp'))
    kwargs = {
        'driver': 'ESRI Shapefile',
        'crs': 'EPSG:4326',
        'schema': {
            'geometry': 'Point',
            'properties': [('a_fieldname', 'float')]}}

    with fiona.open(name, 'w', **kwargs) as dst:
        rec = {}
        rec['geometry'] = {'type': 'Point', 'coordinates': (0, 0)}
        rec['properties'] = {'a_fieldname': 3.0}
        dst.write(rec)

    with fiona.open(name) as src:
        first = next(iter(src))
        assert first['geometry'] == {'type': 'Point', 'coordinates': (0, 0)}
        # The Shapefile driver truncates field names to 10 characters.
        assert first['properties']['a_fieldnam'] == 3.0
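The truncation is also visible in the schema of the written file. A minimal sketch reusing `name` from the test above (the printed value is indicative only, not verified output):

with fiona.open(name) as src:
    # The Shapefile driver caps field names at 10 characters, so the
    # stored schema lists 'a_fieldnam' rather than 'a_fieldname'.
    print(src.schema['properties'])  # e.g. {'a_fieldnam': 'float:24.15'}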
c.execute('''CREATE TABLE IF NOT EXISTS countries (
    name TEXT COLLATE NOCASE,
    poly BLOB,
    fips TEXT,
    iso2 TEXT,
    iso3 TEXT,
    un INT,
    area INT,
    lon REAL,
    lat REAL
)''')
c.execute('CREATE INDEX IF NOT EXISTS countryname ON countries (name)')
conn.commit()

import fiona

count = 0
with fiona.open(r"D:/ds/tm_world/TM_WORLD_BORDERS-0.3.shp", 'r') as fi:
    for a in fi:
        # Copy the attribute fields and store the geometry alongside them.
        di = dict(a['properties'])
        di['poly'] = str(a['geometry'])
        count += 1
        c.execute(
            "INSERT INTO countries VALUES (:NAME, :poly, :FIPS, :ISO2, :ISO3, :UN, :AREA, :LON, :LAT)",
            di)
print(count)
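To sanity-check the load, the table can be queried back. A small sketch, assuming the same `conn` connection and that the insert loop above has run (the country name is only an example):

conn.commit()  # make sure the inserted rows are committed before querying
row = conn.execute(
    "SELECT name, iso3, area FROM countries WHERE name = ?", ("Canada",)
).fetchone()
print(row)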
def save_shapefile(cdsdata, path, template):
    """
    Accepts bokeh ColumnDataSource data and saves it as a shapefile,
    using an existing template to determine the required schema.
    """
    with fiona.open(template) as collection:
        arrays = [np.column_stack([xs, ys]) for xs, ys in zip(cdsdata['xs'], cdsdata['ys'])]
        polys = gv.Polygons(arrays, crs=ccrs.GOOGLE_MERCATOR)
        projected = gv.operation.project_path(polys, projection=ccrs.PlateCarree())
        data = [list(map(tuple, arr)) for arr in projected.split(datatype='array')]
        shape_data = list(collection.items())[0][1]
        shape_data['geometry']['coordinates'] = data
        with fiona.open(path, 'w', collection.driver, collection.schema, collection.crs) as c:
            c.write(shape_data)
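A hypothetical call, purely for illustration (both paths and the coordinate lists are placeholders; `cdsdata` just needs 'xs' and 'ys' sequences of per-polygon coordinates in Web Mercator, as a bokeh ColumnDataSource's `.data` would provide):

cdsdata = {'xs': [[0.0, 1e5, 1e5]], 'ys': [[0.0, 0.0, 1e5]]}  # made-up triangle
save_shapefile(cdsdata, 'edited_regions.shp', 'template_regions.shp')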
def catchment_boundaries(catch_path):
    """Collect all catchment boundaries into a MultiLineString"""
    geoms = []
    with fiona.open(catch_path) as c:
        for f in c:
            g = sg.shape(f["geometry"]).boundary
            geoms.append(g)
    return unary_union(geoms)
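The snippet assumes `sg` is `shapely.geometry` and `unary_union` comes from `shapely.ops`; a sketch of a call with a placeholder path:

import fiona
import shapely.geometry as sg
from shapely.ops import unary_union

boundaries = catchment_boundaries('catchments.shp')  # placeholder path
print(boundaries.geom_type)  # typically 'MultiLineString'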
def shape_(input_shape):
    with fiona.open(input_shape) as fiona_collection:
        # In this case, we'll assume the shapefile only has one record/layer
        shapefile_record = next(iter(fiona_collection))
        # Use Shapely to create the polygon
        return geometry.shape(shapefile_record['geometry'])
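For illustration, a hedged usage of the helper above (the path is a placeholder, and `geometry` is assumed to be `shapely.geometry`):

import fiona
from shapely import geometry

poly = shape_('study_area.shp')  # placeholder path
print(poly.bounds)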
def main(shapefile):
    with fiona.open(shapefile, 'r') as shp:
        streets = collect_streets(shp)

    sys.stderr.write(u'Dumping...\n')
    sys.stdout.write('''{"type":"FeatureCollection","features":[''')
    first = True
    for street, segments in cluster_streets(streets):
        for segment in segments:
            if first:
                first = False
            else:
                sys.stdout.write(',')
            json.dump(segment.geojson(), sys.stdout)
    sys.stdout.write(']}')
def ReadShapeFile(ShapeFile):
    """
    Open shapefile and create Polygon dictionary
    returns dictionary of shapely Polygons

    MDH
    """
    # open shapefile and read shapes
    Shapes = fiona.open(ShapeFile)
    # get the input coordinate system
    Input_CRS = Proj(Shapes.crs)
    # Create a dictionary of shapely polygons
    PolygonDict = {}
    # loop through shapes and add to dictionary
    for Feature in Shapes:
        if Feature['geometry']['type'] == 'MultiPolygon':
            Shape = MultiPolygon(shape(Feature['geometry']))
            Value = float(Feature['properties']['ID'])
        elif Feature['geometry']['type'] == 'Polygon':
            Shape = Polygon(shape(Feature['geometry']))
            Value = float(Feature['properties']['ID'])
        else:
            continue
        # add to the dictionary, keyed by the feature's ID value
        PolygonDict[Value] = Shape

    return PolygonDict
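A sketch of how the helper above might be called, with a placeholder path; the snippet assumes `fiona`, `Proj` from `pyproj`, and `shape`, `Polygon`, `MultiPolygon` from `shapely.geometry` are imported:

PolygonDict = ReadShapeFile('basins.shp')  # placeholder path
for ID, Poly in PolygonDict.items():
    print(ID, Poly.area)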
def load_local_authority_districts():
    """
    Load in Local Authority District (LAD) shapes and extract id information.
    """
    lads = []

    lad_shapes = os.path.join(
        SHAPES_INPUT_PATH, 'lad_uk_2016-12', 'lad_uk_2016-12.shp'
    )

    with fiona.open(lad_shapes, 'r') as lad_shape:
        for lad in lad_shape:
            if not lad['properties']['name'].startswith((
                    'E06000053',
                    'S12000027',
                    'N09000001',
                    'N09000002',
                    'N09000003',
                    'N09000004',
                    'N09000005',
                    'N09000006',
                    'N09000007',
                    'N09000008',
                    'N09000009',
                    'N09000010',
                    'N09000011',
            )):